Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/8909#discussion_r41462815

    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala ---
    @@ -1196,49 +1174,101 @@ class SQLContext(@transient val sparkContext: SparkContext)
         // Register a successfully instantiated context to the singleton. This should be at the end of
         // the class definition so that the singleton is updated only if there is no exception in the
         // construction of the instance.
    -    SQLContext.setLastInstantiatedContext(self)
    +    sparkContext.addSparkListener(new SparkListener {
    +      override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd) {
    +        SQLContext.clearTheInstantiatedContext(self)
    +      }
    +    })
    +
    +    SQLContext.setInstantiatedContext(self)
       }

     /**
      * This SQLContext object contains utility functions to create a singleton SQLContext instance,
    - * or to get the last created SQLContext instance.
    + * or to get the created SQLContext instance.
      */
     object SQLContext {

       private val INSTANTIATION_LOCK = new Object()

       /**
    -   * Reference to the last created SQLContext.
    +   * The active SQLContext for the current thread.
        */
    -  @transient private val lastInstantiatedContext = new AtomicReference[SQLContext]()
    +  private val activeContext: InheritableThreadLocal[SQLContext] =
    +    new InheritableThreadLocal[SQLContext]
    +
    +  /**
    +   * Reference to the created SQLContext.
    +   */
    +  @transient private val instantiatedContext = new AtomicReference[SQLContext]()

    --- End diff --

    If we absolutely have to keep both, then we should expand the comments to explain why a thread-local context is needed in addition to the one that was initially instantiated.
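    For readers following along, here is a minimal, hypothetical sketch of the two-level lookup pattern the diff introduces: a thread-local "active" context consulted first, with a globally instantiated singleton as the fallback. The names (ContextRegistry, Ctx, getOrCreate) are illustrative stand-ins, not the PR's actual API:

        import java.util.concurrent.atomic.AtomicReference

        object ContextRegistry {
          class Ctx  // stand-in for SQLContext

          // Per-thread override; InheritableThreadLocal propagates it to child threads.
          private val activeContext = new InheritableThreadLocal[Ctx]

          // Process-wide singleton, set at most once by a successful construction.
          private val instantiatedContext = new AtomicReference[Ctx]()

          def setActive(ctx: Ctx): Unit = activeContext.set(ctx)
          def clearActive(): Unit = activeContext.remove()

          def getOrCreate(): Ctx = {
            // 1. Prefer the context this thread (or a parent thread) explicitly activated.
            val active = activeContext.get()
            if (active != null) {
              active
            } else {
              // 2. Otherwise fall back to the global singleton, creating it at most
              //    once; compareAndSet resolves the race between concurrent callers.
              if (instantiatedContext.get() == null) {
                instantiatedContext.compareAndSet(null, new Ctx)
              }
              instantiatedContext.get()
            }
          }
        }

    This is essentially the documentation gap the review comment asks to close: presumably the thread-local lets different threads work against different contexts, while the AtomicReference preserves the old "get the instantiated context" fallback behavior, and that lookup ordering is not obvious without a comment.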