Repository: spark
Updated Branches:
  refs/heads/branch-2.0 4939c2a12 -> d3cd579d7


[SPARK-15532][SQL] SQLContext/HiveContext's public constructors should use SparkSession.builder.getOrCreate

## What changes were proposed in this pull request?
This PR changes SQLContext/HiveContext's public constructors to use SparkSession.builder.getOrCreate and removes the isRootContext flag from SQLContext.
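
Because the deprecated constructors now delegate to the builder, two SQLContexts built from the same SparkContext end up wrapping the same SparkSession. A minimal sketch of the assumed behavior (illustrative, not part of the diff):

    val ctx1 = new SQLContext(sc)  // deprecated; delegates to the builder
    val ctx2 = new SQLContext(sc)  // getOrCreate reuses the existing session
    assert(ctx1.sparkSession eq ctx2.sparkSession)  // one shared SparkSession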

## How was this patch tested?
Existing tests.

Author: Yin Huai <yh...@databricks.com>

Closes #13310 from yhuai/SPARK-15532.

(cherry picked from commit 3ac2363d757cc9cebc627974f17ecda3a263efdf)
Signed-off-by: Yin Huai <yh...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d3cd579d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d3cd579d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d3cd579d

Branch: refs/heads/branch-2.0
Commit: d3cd579d7cdc1d2642186041dfbe296b266e4069
Parents: 4939c2a
Author: Yin Huai <yh...@databricks.com>
Authored: Thu May 26 16:53:31 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Thu May 26 16:54:03 2016 -0700

----------------------------------------------------------------------
 .../spark/mllib/classification/LogisticRegression.scala   |  2 +-
 project/MimaExcludes.scala                                |  2 ++
 .../src/main/scala/org/apache/spark/sql/SQLContext.scala  | 10 ++--------
 .../main/scala/org/apache/spark/sql/SparkSession.scala    |  4 ++--
 .../scala/org/apache/spark/sql/hive/HiveContext.scala     | 10 ++++------
 .../scala/org/apache/spark/sql/hive/test/TestHive.scala   |  9 ++++-----
 6 files changed, 15 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/d3cd579d/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
index adbcdd3..4bba2ea 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
@@ -437,7 +437,7 @@ class LogisticRegressionWithLBFGS
         lr.setMaxIter(optimizer.getNumIterations())
         lr.setTol(optimizer.getConvergenceTol())
         // Convert our input into a DataFrame
-        val sqlContext = new SQLContext(input.context)
+        val sqlContext = SQLContext.getOrCreate(input.context)
         import sqlContext.implicits._
         val df = input.map(_.asML).toDF()
         // Determine if we should cache the DF
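
Here SQLContext.getOrCreate returns the context already associated with the given SparkContext instead of allocating a fresh one, so library code like this cannot spawn duplicate session state. A sketch of the assumed semantics:

    val a = SQLContext.getOrCreate(sc)
    val b = SQLContext.getOrCreate(sc)
    assert(a eq b)  // the second call returns the same instance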

http://git-wip-us.apache.org/repos/asf/spark/blob/d3cd579d/project/MimaExcludes.scala
----------------------------------------------------------------------
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 08c575a..73debe9 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -54,6 +54,8 @@ object MimaExcludes {
         ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.rdd.RDD.coalesce"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.rdd.PartitionCoalescer$LocationIterator"),
         ProblemFilters.exclude[IncompatibleTemplateDefProblem]("org.apache.spark.rdd.PartitionCoalescer"),
+        // SPARK-15532 Remove isRootContext flag from SQLContext.
+        ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SQLContext.isRootContext"),
         // SPARK-12600 Remove SQL deprecated methods
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext$QueryExecution"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext$SparkPlanner"),
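
Removing a public member such as isRootContext is a binary-incompatible change, so MiMa must be told the removal is intentional. Each filter pairs a problem type with the fully qualified member to exclude from the compatibility check; the new entry, shown standalone for clarity:

    // Same filter as in the hunk above: the removal of
    // SQLContext.isRootContext is deliberate, so MiMa should not fail on it.
    ProblemFilters.exclude[DirectMissingMethodProblem](
      "org.apache.spark.sql.SQLContext.isRootContext")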

http://git-wip-us.apache.org/repos/asf/spark/blob/d3cd579d/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index b17fb8a..66d9aa2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -57,9 +57,7 @@ import org.apache.spark.sql.util.ExecutionListenerManager
  * @groupname Ungrouped Support functions for language integrated queries
  * @since 1.0.0
  */
-class SQLContext private[sql](
-    val sparkSession: SparkSession,
-    val isRootContext: Boolean)
+class SQLContext private[sql](val sparkSession: SparkSession)
   extends Logging with Serializable {
 
   self =>
@@ -69,13 +67,9 @@ class SQLContext private[sql](
   // Note: Since Spark 2.0 this class has become a wrapper of SparkSession, where the
   // real functionality resides. This class remains mainly for backward compatibility.
 
-  private[sql] def this(sparkSession: SparkSession) = {
-    this(sparkSession, true)
-  }
-
   @deprecated("Use SparkSession.builder instead", "2.0.0")
   def this(sc: SparkContext) = {
-    this(new SparkSession(sc))
+    this(SparkSession.builder().sparkContext(sc).getOrCreate())
   }
 
   @deprecated("Use SparkSession.builder instead", "2.0.0")

http://git-wip-us.apache.org/repos/asf/spark/blob/d3cd579d/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index a36368a..5dabe0e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -25,7 +25,7 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
 
-import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.{SparkConf, SparkContext, SparkException}
 import org.apache.spark.annotation.{DeveloperApi, Experimental}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
@@ -40,7 +40,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Range}
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.execution.ui.SQLListener
-import org.apache.spark.sql.internal.{CatalogImpl, SessionState, SharedState}
+import org.apache.spark.sql.internal.{CatalogImpl, SessionState, SharedState, SQLConf}
 import org.apache.spark.sql.sources.BaseRelation
 import org.apache.spark.sql.types.{DataType, LongType, StructType}
 import org.apache.spark.sql.util.ExecutionListenerManager
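
The SparkException and SQLConf imports presumably back changes elsewhere in SparkSession.scala that these hunks do not show. For reference, the getOrCreate contract the new constructors rely on (assumed Spark 2.0 semantics):

    val s1 = SparkSession.builder().appName("example").getOrCreate()
    val s2 = SparkSession.builder().getOrCreate()
    assert(s1 eq s2)  // the second call reuses the existing default session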

http://git-wip-us.apache.org/repos/asf/spark/blob/d3cd579d/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 415d4c0..3cfe932 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -28,15 +28,13 @@ import org.apache.spark.sql.{SparkSession, SQLContext}
  * Configuration for Hive is read from hive-site.xml on the classpath.
  */
 @deprecated("Use SparkSession.builder.enableHiveSupport instead", "2.0.0")
-class HiveContext private[hive](
-    _sparkSession: SparkSession,
-    isRootContext: Boolean)
-  extends SQLContext(_sparkSession, isRootContext) with Logging {
+class HiveContext private[hive](_sparkSession: SparkSession)
+  extends SQLContext(_sparkSession) with Logging {
 
   self =>
 
   def this(sc: SparkContext) = {
-    this(new SparkSession(HiveUtils.withHiveExternalCatalog(sc)), true)
+    this(SparkSession.builder().sparkContext(HiveUtils.withHiveExternalCatalog(sc)).getOrCreate())
   }
 
   def this(sc: JavaSparkContext) = this(sc.sc)
@@ -47,7 +45,7 @@ class HiveContext private[hive](
    * and Hive client (both of execution and metadata) with existing HiveContext.
    */
   override def newSession(): HiveContext = {
-    new HiveContext(sparkSession.newSession(), isRootContext = false)
+    new HiveContext(sparkSession.newSession())
   }
 
   protected[sql] override def sessionState: HiveSessionState = {
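
With the flag gone, session forking is uniform: newSession() simply wraps a forked SparkSession in a new HiveContext. A sketch of the assumed isolation semantics:

    val hc  = new HiveContext(sc)  // deprecated; delegates to the builder
    val hc2 = hc.newSession()      // isolated SQL conf, temp views, and UDFs
    assert(hc.sparkContext eq hc2.sparkContext)  // same underlying context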

http://git-wip-us.apache.org/repos/asf/spark/blob/d3cd579d/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 2f6aa36..81964db 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -70,16 +70,15 @@ object TestHive
  * test cases that rely on TestHive must be serialized.
  */
 class TestHiveContext(
-    @transient override val sparkSession: TestHiveSparkSession,
-    isRootContext: Boolean)
-  extends SQLContext(sparkSession, isRootContext) {
+    @transient override val sparkSession: TestHiveSparkSession)
+  extends SQLContext(sparkSession) {
 
   def this(sc: SparkContext) {
-    this(new TestHiveSparkSession(HiveUtils.withHiveExternalCatalog(sc)), true)
+    this(new TestHiveSparkSession(HiveUtils.withHiveExternalCatalog(sc)))
   }
 
   override def newSession(): TestHiveContext = {
-    new TestHiveContext(sparkSession.newSession(), false)
+    new TestHiveContext(sparkSession.newSession())
   }
 
   override def sharedState: TestHiveSharedState = sparkSession.sharedState

