Repository: spark
Updated Branches:
  refs/heads/branch-2.2 6c628e75e -> b560c975b


Revert "[SPARK-20946][SQL] simplify the config setting logic in 
SparkSession.getOrCreate"

This reverts commit e11d90bf8deb553fd41b8837e3856c11486c2503.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b560c975
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b560c975
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b560c975

Branch: refs/heads/branch-2.2
Commit: b560c975b7cdc8828fc9e27cbca740c5e550b9cd
Parents: 6c628e7
Author: Yin Huai <yh...@databricks.com>
Authored: Fri Jun 2 15:36:21 2017 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Fri Jun 2 15:37:38 2017 -0700

----------------------------------------------------------------------
 .../spark/ml/recommendation/ALSSuite.scala      |  4 +++-
 .../apache/spark/ml/tree/impl/TreeTests.scala   |  2 ++
 .../org/apache/spark/sql/SparkSession.scala     | 25 +++++++++++++-------
 3 files changed, 21 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b560c975/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
index 23f2256..701040f 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
@@ -820,13 +820,15 @@ class ALSCleanerSuite extends SparkFunSuite {
       FileUtils.listFiles(localDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE).asScala.toSet
     try {
       conf.set("spark.local.dir", localDir.getAbsolutePath)
-      val sc = new SparkContext("local[2]", "ALSCleanerSuite", conf)
+      val sc = new SparkContext("local[2]", "test", conf)
       try {
         sc.setCheckpointDir(checkpointDir.getAbsolutePath)
         // Generate test data
         val (training, _) = ALSSuite.genImplicitTestData(sc, 20, 5, 1, 0.2, 0)
         // Implicitly test the cleaning of parents during ALS training
         val spark = SparkSession.builder
+          .master("local[2]")
+          .appName("ALSCleanerSuite")
           .sparkContext(sc)
           .getOrCreate()
         import spark.implicits._
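
For reference, a minimal standalone sketch (not part of this commit) of the pattern this test fix, and the TreeTests fix below, restore: the session is built against an existing SparkContext, with master and app name given to the builder explicitly rather than relying on the builder to propagate options into the context's SparkConf. Note that Builder.sparkContext is private[spark], so this pattern only applies to Spark-internal code such as these tests; the conf setup and names below mirror the test.

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SparkSession

    val conf = new SparkConf()
    val sc = new SparkContext("local[2]", "test", conf)
    // With this revert, builder options are not copied into a user-supplied
    // context's SparkConf, so master/appName are declared on the builder.
    val spark = SparkSession.builder
      .master("local[2]")
      .appName("ALSCleanerSuite")
      .sparkContext(sc) // private[spark] API, usable from Spark-internal tests
      .getOrCreate()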

http://git-wip-us.apache.org/repos/asf/spark/blob/b560c975/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala
index b6894b3..92a2369 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala
@@ -43,6 +43,8 @@ private[ml] object TreeTests extends SparkFunSuite {
       categoricalFeatures: Map[Int, Int],
       numClasses: Int): DataFrame = {
     val spark = SparkSession.builder()
+      .master("local[2]")
+      .appName("TreeTests")
       .sparkContext(data.sparkContext)
       .getOrCreate()
     import spark.implicits._

http://git-wip-us.apache.org/repos/asf/spark/blob/b560c975/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index bf37b76..d2bf350 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -757,8 +757,6 @@ object SparkSession {
 
     private[this] var userSuppliedContext: Option[SparkContext] = None
 
-    // The `SparkConf` inside the given `SparkContext` may get changed if you specify some options
-    // for this builder.
     private[spark] def sparkContext(sparkContext: SparkContext): Builder = synchronized {
       userSuppliedContext = Option(sparkContext)
       this
@@ -856,7 +854,7 @@ object SparkSession {
      *
      * @since 2.2.0
      */
-    def withExtensions(f: SparkSessionExtensions => Unit): Builder = synchronized {
+    def withExtensions(f: SparkSessionExtensions => Unit): Builder = {
       f(extensions)
       this
     }
@@ -901,14 +899,22 @@ object SparkSession {
 
         // No active nor global default session. Create a new one.
         val sparkContext = userSuppliedContext.getOrElse {
+          // set app name if not given
+          val randomAppName = java.util.UUID.randomUUID().toString
           val sparkConf = new SparkConf()
-          options.get("spark.master").foreach(sparkConf.setMaster)
-          // set a random app name if not given.
-          sparkConf.setAppName(options.getOrElse("spark.app.name",
-            java.util.UUID.randomUUID().toString))
-          SparkContext.getOrCreate(sparkConf)
+          options.foreach { case (k, v) => sparkConf.set(k, v) }
+          if (!sparkConf.contains("spark.app.name")) {
+            sparkConf.setAppName(randomAppName)
+          }
+          val sc = SparkContext.getOrCreate(sparkConf)
+          // maybe this is an existing SparkContext, update its SparkConf which maybe used
+          // by SparkSession
+          options.foreach { case (k, v) => sc.conf.set(k, v) }
+          if (!sc.conf.contains("spark.app.name")) {
+            sc.conf.setAppName(randomAppName)
+          }
+          sc
         }
-        options.foreach { case (k, v) => sparkContext.conf.set(k, v) }
 
         // Initialize extensions if the user has defined a configurator class.
         val extensionConfOption = sparkContext.conf.get(StaticSQLConf.SPARK_SESSION_EXTENSIONS)
@@ -929,6 +935,7 @@ object SparkSession {
         }
 
         session = new SparkSession(sparkContext, None, None, extensions)
+        options.foreach { case (k, v) => session.sessionState.conf.setConfString(k, v) }
         defaultSession.set(session)
 
         // Register a successfully instantiated context to the singleton. This should be at the
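
For context, a hypothetical user-level sketch of the flow this file restores: options given to the builder seed the SparkConf used to obtain the SparkContext (falling back to a random UUID app name), are re-applied to the possibly pre-existing context's conf, and are finally mirrored into the new session's SQL conf via setConfString, so they stay visible through spark.conf. The option key and values below are illustrative only.

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("conf-propagation-demo") // assumed name; a random UUID is used if omitted
      .config("spark.sql.shuffle.partitions", "4")
      .getOrCreate()
    // The builder option was mirrored into the session's SQL conf:
    assert(spark.conf.get("spark.sql.shuffle.partitions") == "4")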

