Github user yhuai commented on a diff in the pull request: https://github.com/apache/spark/pull/13310#discussion_r64823932 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala --- @@ -69,13 +67,30 @@ class SQLContext private[sql]( // Note: Since Spark 2.0 this class has become a wrapper of SparkSession, where the // real functionality resides. This class remains mainly for backward compatibility. - private[sql] def this(sparkSession: SparkSession) = { - this(sparkSession, true) - } - @deprecated("Use SparkSession.builder instead", "2.0.0") def this(sc: SparkContext) = { - this(new SparkSession(sc)) + this { + val session = new SparkSession(sc) + // If spark.sql.allowMultipleContexts is true, we will throw an exception if a user + // wants to create a new root SQLContext (a SQLContext that is not created by newSession). + val allowMultipleContexts = sc.conf.getBoolean( + SQLConf.ALLOW_MULTIPLE_CONTEXTS.key, + SQLConf.ALLOW_MULTIPLE_CONTEXTS.defaultValue.get) + + // Assert no root SQLContext/SparkSession is running when allowMultipleContexts is false. + { + val defaultSessionExists = SparkSession.getDefaultSession.isDefined + if (!allowMultipleContexts && defaultSessionExists) { + val errMsg = sc.conf.get( + SQLConf.ALLOW_MULTIPLE_CONTEXTS_ERROR_MESSAGE.key, + SQLConf.ALLOW_MULTIPLE_CONTEXTS_ERROR_MESSAGE.defaultValue.get) + throw new SparkException(errMsg) + } else if (!defaultSessionExists) { + SparkSession.setDefaultSession(session) + } + } + session + } --- End diff -- We need to do the same thing for HiveContext.
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org