This is an automated email from the ASF dual-hosted git repository. lixiao pushed a commit to branch branch-3.0 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push: new 1701f78 [SPARK-31234][SQL][FOLLOW-UP] ResetCommand should not affect static SQL Configuration 1701f78 is described below commit 1701f7882aac9e3efaa36c628815edfad09b62fa Author: gatorsmile <gatorsm...@gmail.com> AuthorDate: Mon Apr 20 13:08:55 2020 -0700 [SPARK-31234][SQL][FOLLOW-UP] ResetCommand should not affect static SQL Configuration ### What changes were proposed in this pull request? This PR is the follow-up PR of https://github.com/apache/spark/pull/28003 - add a migration guide - add an end-to-end test case. ### Why are the changes needed? The original PR made the major behavior change in the user-facing RESET command. ### Does this PR introduce any user-facing change? No ### How was this patch tested? Added a new end-to-end test Closes #28265 from gatorsmile/spark-31234followup. Authored-by: gatorsmile <gatorsm...@gmail.com> Signed-off-by: gatorsmile <gatorsm...@gmail.com> (cherry picked from commit 6c792a79c10e7b01bd040ef14c848a2a2378e28c) Signed-off-by: gatorsmile <gatorsm...@gmail.com> --- docs/core-migration-guide.md | 2 +- docs/sql-migration-guide.md | 4 ++++ .../org/apache/spark/sql/internal/StaticSQLConf.scala | 3 +++ .../org/apache/spark/sql/internal/SharedState.scala | 3 --- .../org/apache/spark/sql/SparkSessionBuilderSuite.scala | 16 ++++++++++++++++ .../org/apache/spark/sql/internal/SQLConfSuite.scala | 2 +- 6 files changed, 25 insertions(+), 5 deletions(-) diff --git a/docs/core-migration-guide.md b/docs/core-migration-guide.md index cde6e07..33406d0 100644 --- a/docs/core-migration-guide.md +++ b/docs/core-migration-guide.md @@ -25,7 +25,7 @@ license: | ## Upgrading from Core 2.4 to 3.0 - The `org.apache.spark.ExecutorPlugin` interface and related configuration has been replaced with - `org.apache.spark.plugin.SparkPlugin`, which adds new functionality. Plugins using the old + `org.apache.spark.api.plugin.SparkPlugin`, which adds new functionality. 
Plugins using the old interface must be modified to extend the new interfaces. Check the [Monitoring](monitoring.html) guide for more details. diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md index f5c81e9..8945c13 100644 --- a/docs/sql-migration-guide.md +++ b/docs/sql-migration-guide.md @@ -210,6 +210,10 @@ license: | * The decimal string representation can be different between Hive 1.2 and Hive 2.3 when using `TRANSFORM` operator in SQL for script transformation, which depends on Hive's behavior. In Hive 1.2, the string representation omits trailing zeroes. But in Hive 2.3, it is always padded to 18 digits with trailing zeroes if necessary. +## Upgrading from Spark SQL 2.4.5 to 2.4.6 + + - In Spark 2.4.6, the `RESET` command does not reset the static SQL configuration values to the default. It only clears the runtime SQL configuration values. + ## Upgrading from Spark SQL 2.4.4 to 2.4.5 - Since Spark 2.4.5, `TRUNCATE TABLE` command tries to set back original permission and ACLs during re-creating the table/partition paths. To restore the behaviour of earlier versions, set `spark.sql.truncateTable.ignorePermissionAcl.enabled` to `true`. diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala index d202528..9618ff6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala @@ -47,6 +47,9 @@ object StaticSQLConf { .internal() .version("2.1.0") .stringConf + // System preserved database should not exist in metastore. However it's hard to guarantee it + // for every session, because case-sensitivity differs. Here we always lowercase it to make our + // life easier. 
.transform(_.toLowerCase(Locale.ROOT)) .createWithDefault("global_temp") diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala index 14b8ea6..47119ab 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala @@ -153,9 +153,6 @@ private[sql] class SharedState( * A manager for global temporary views. */ lazy val globalTempViewManager: GlobalTempViewManager = { - // System preserved database should not exist in metastore. However it's hard to guarantee it - // for every session, because case-sensitivity differs. Here we always lowercase it to make our - // life easier. val globalTempDB = conf.get(GLOBAL_TEMP_DATABASE) if (externalCatalog.databaseExists(globalTempDB)) { throw new SparkException( diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala index 10b1757..f238641 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala @@ -22,6 +22,7 @@ import org.scalatest.BeforeAndAfterEach import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite} import org.apache.spark.internal.config.UI.UI_ENABLED import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.internal.StaticSQLConf.GLOBAL_TEMP_DATABASE /** * Test cases for the builder pattern of [[SparkSession]]. 
@@ -152,4 +153,19 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach { session.sparkContext.hadoopConfiguration.unset(mySpecialKey) } } + + test("SPARK-31234: RESET command will not change static sql configs and " + + "spark context conf values in SessionState") { + val session = SparkSession.builder() + .master("local") + .config(GLOBAL_TEMP_DATABASE.key, value = "globalTempDB-SPARK-31234") + .config("spark.app.name", "test-app-SPARK-31234") + .getOrCreate() + + assert(session.sessionState.conf.getConfString("spark.app.name") === "test-app-SPARK-31234") + assert(session.sessionState.conf.getConf(GLOBAL_TEMP_DATABASE) === "globaltempdb-spark-31234") + session.sql("RESET") + assert(session.sessionState.conf.getConfString("spark.app.name") === "test-app-SPARK-31234") + assert(session.sessionState.conf.getConf(GLOBAL_TEMP_DATABASE) === "globaltempdb-spark-31234") + } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index f389465..feccf52 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -116,7 +116,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { } } - test("reset will not change static sql configs and spark core configs") { + test("SPARK-31234: reset will not change static sql configs and spark core configs") { val conf = spark.sparkContext.getConf.getAll.toMap val appName = conf.get("spark.app.name") val driverHost = conf.get("spark.driver.host") --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org