GitHub user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19529#discussion_r146977773
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/test/SharedSQLContext.scala ---
    @@ -17,86 +17,8 @@
     
     package org.apache.spark.sql.test
     
    -import scala.concurrent.duration._
    -
    -import org.scalatest.BeforeAndAfterEach
    -import org.scalatest.concurrent.Eventually
    -
    -import org.apache.spark.{DebugFilesystem, SparkConf}
    -import org.apache.spark.sql.{SparkSession, SQLContext}
    -import org.apache.spark.sql.internal.SQLConf
    -
    -/**
    - * Helper trait for SQL test suites where all tests share a single [[TestSparkSession]].
    - */
    -trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach with Eventually {
    -
    -  protected def sparkConf = {
    -    new SparkConf()
    -      .set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName)
    -      .set("spark.unsafe.exceptionOnMemoryLeak", "true")
    -      .set(SQLConf.CODEGEN_FALLBACK.key, "false")
    -  }
    -
    -  /**
    -   * The [[TestSparkSession]] to use for all tests in this suite.
    -   *
    -   * By default, the underlying [[org.apache.spark.SparkContext]] will be run in local
    -   * mode with the default test configurations.
    -   */
    -  private var _spark: TestSparkSession = null
    -
    -  /**
    -   * The [[TestSparkSession]] to use for all tests in this suite.
    -   */
    -  protected implicit def spark: SparkSession = _spark
    -
    -  /**
    -   * The [[TestSQLContext]] to use for all tests in this suite.
    -   */
    -  protected implicit def sqlContext: SQLContext = _spark.sqlContext
    -
    -  protected def createSparkSession: TestSparkSession = {
    -    new TestSparkSession(sparkConf)
    -  }
    -
    -  /**
    -   * Initialize the [[TestSparkSession]].
    -   */
    +trait SharedSQLContext extends SQLTestUtils with SharedSparkSession {
       protected override def beforeAll(): Unit = {
    -    SparkSession.sqlListener.set(null)
    -    if (_spark == null) {
    -      _spark = createSparkSession
    -    }
    -    // Ensure we have initialized the context before calling parent code
         super.beforeAll()
    --- End diff --
    
    If this `beforeAll` is just calling `super.beforeAll`, why do we still need to override it?
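    
    For illustration, a self-contained sketch of the point (all names below are hypothetical stand-ins, not Spark's actual traits): when an override's body is nothing but the `super` call, Scala's trait linearization already routes `beforeAll` to the parent implementation, so dropping the override is behavior-preserving.
    
    ```scala
    // Stand-in traits, not Spark's real ones, to make the sketch runnable.
    trait BeforeAll {
      protected def beforeAll(): Unit = ()
    }
    
    trait SharedSparkSessionLike extends BeforeAll {
      protected override def beforeAll(): Unit = {
        println("initializing shared session") // real session init would go here
        super.beforeAll()
      }
    }
    
    // Forwarding-only override, mirroring the diff:
    trait VerboseContext extends SharedSparkSessionLike {
      protected override def beforeAll(): Unit = super.beforeAll()
    }
    
    // Equivalent without the override: linearization still reaches
    // SharedSparkSessionLike.beforeAll.
    trait MinimalContext extends SharedSparkSessionLike
    
    class VerboseRunner extends VerboseContext { def run(): Unit = beforeAll() }
    class MinimalRunner extends MinimalContext { def run(): Unit = beforeAll() }
    
    object Demo extends App {
      new VerboseRunner().run() // prints "initializing shared session"
      new MinimalRunner().run() // prints the same; behavior is identical
    }
    ```
    
    The override earns its keep only if it does more than forward, as the pre-refactor `beforeAll` here did when it reset the SQL listener and created the session.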

