This is an automated email from the ASF dual-hosted git repository. yao pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push: new 2d9a9634b64 [SPARK-42448][SQL] Fix spark sql shell prompt for current db 2d9a9634b64 is described below commit 2d9a9634b64a55022add15ca4641e92813e88450 Author: Kent Yao <y...@apache.org> AuthorDate: Thu Feb 23 16:46:36 2023 +0800 [SPARK-42448][SQL] Fix spark sql shell prompt for current db ### What changes were proposed in this pull request? The CliSessionState does not contain the current database info, we shall use spark's `catalog.currentDatabase` ### Why are the changes needed? bugfix ### Does this PR introduce _any_ user-facing change? yes, when users use spark-sql and switch database, the prompt now shows the correct one instead of `default` ### How was this patch tested? locally tested ```textmate spark-sql (default)> create database abc; Time taken: 0.24 seconds spark-sql (default)> use abc; Time taken: 0.027 seconds spark-sql (ABC)> ``` Closes #40036 from yaooqinn/SPARK-42448. Authored-by: Kent Yao <y...@apache.org> Signed-off-by: Kent Yao <y...@apache.org> (cherry picked from commit 2478342f834152ab33aa283816e6a0f346b64e44) Signed-off-by: Kent Yao <y...@apache.org> --- .../org/apache/spark/sql/internal/SQLConf.scala | 8 ++++++++ .../sql/hive/thriftserver/SparkSQLCLIDriver.scala | 12 ++++++++++-- .../spark/sql/hive/thriftserver/CliSuite.scala | 22 ++++++++++++++++++++-- 3 files changed, 38 insertions(+), 4 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index e764e0510d9..25e45dce181 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -4024,6 +4024,14 @@ object SQLConf { .booleanConf .createWithDefault(false) + val LEGACY_EMPTY_CURRENT_DB_IN_CLI = + buildConf("spark.sql.legacy.emptyCurrentDBInCli") + .internal() + .doc("When false, spark-sql CLI 
prints the current database in prompt") + .version("3.4.0") + .booleanConf + .createWithDefault(false) + val LEGACY_KEEP_COMMAND_OUTPUT_SCHEMA = buildConf("spark.sql.legacy.keepCommandOutputSchema") .internal() diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala index b7a6dad162f..7ed5e7a7611 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala @@ -48,6 +48,7 @@ import org.apache.spark.sql.hive.HiveUtils import org.apache.spark.sql.hive.client.HiveClientImpl import org.apache.spark.sql.hive.security.HiveDelegationTokenProvider import org.apache.spark.sql.internal.SharedState +import org.apache.spark.sql.internal.SQLConf.LEGACY_EMPTY_CURRENT_DB_IN_CLI import org.apache.spark.util.ShutdownHookManager import org.apache.spark.util.SparkExitCode._ @@ -278,8 +279,15 @@ private[hive] object SparkSQLCLIDriver extends Logging { var ret = 0 var prefix = "" - val currentDB = ReflectionUtils.invokeStatic(classOf[CliDriver], "getFormattedDb", - classOf[HiveConf] -> conf, classOf[CliSessionState] -> sessionState) + + def currentDB = { + if (!SparkSQLEnv.sqlContext.conf.getConf(LEGACY_EMPTY_CURRENT_DB_IN_CLI)) { + s" (${SparkSQLEnv.sqlContext.sparkSession.catalog.currentDatabase})" + } else { + ReflectionUtils.invokeStatic(classOf[CliDriver], "getFormattedDb", + classOf[HiveConf] -> conf, classOf[CliSessionState] -> sessionState) + } + } def promptWithCurrentDB: String = s"$prompt$currentDB" def continuedPromptWithDBSpaces: String = continuedPrompt + ReflectionUtils.invokeStatic( diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala index f73b1b8e68a..5413635ba47 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala @@ -91,7 +91,8 @@ class CliSuite extends SparkFunSuite { errorResponses: Seq[String] = Seq("Error:"), maybeWarehouse: Option[File] = Some(warehousePath), useExternalHiveFile: Boolean = false, - metastore: File = metastorePath)( + metastore: File = metastorePath, + prompt: String = "spark-sql>")( queriesAndExpectedAnswers: (String, String)*): Unit = { // Explicitly adds ENTER for each statement to make sure they are actually entered into the CLI. @@ -105,7 +106,7 @@ class CliSuite extends SparkFunSuite { } else { // spark-sql echoes the submitted queries val xs = query.split("\n").toList - val queryEcho = s"spark-sql> ${xs.head}" :: xs.tail.map(l => s" > $l") + val queryEcho = s"$prompt ${xs.head}" :: xs.tail.map(l => s" > $l") // longer lines sometimes get split in the output, // match the first 60 characters of each query line queryEcho.map(_.take(60)) :+ answer @@ -127,6 +128,7 @@ class CliSuite extends SparkFunSuite { | --driver-java-options -Dderby.system.durability=test | $extraHive | --conf spark.ui.enabled=false + | --conf ${SQLConf.LEGACY_EMPTY_CURRENT_DB_IN_CLI.key}=true | --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$jdbcUrl | --hiveconf ${ConfVars.SCRATCHDIR}=$scratchDirPath | --hiveconf conf1=conftest @@ -788,4 +790,20 @@ class CliSuite extends SparkFunSuite { "--conf", s"${StaticSQLConf.CATALOG_DEFAULT_DATABASE.key}=spark_35242"))( "show tables;" -> "spark_test") } + + test("SPARK-42448: Print correct database in prompt") { + runCliWithin( + 2.minute, + Seq("--conf", s"${SQLConf.LEGACY_EMPTY_CURRENT_DB_IN_CLI.key}=false"), + prompt = "spark-sql (default)>")( + "set abc;" -> "abc\t<undefined>", + "create database spark_42448;" -> "")
+ + runCliWithin( + 2.minute, + Seq("--conf", s"${SQLConf.LEGACY_EMPTY_CURRENT_DB_IN_CLI.key}=false", "--database", + "spark_42448"), + prompt = "spark-sql (spark_42448)>")( + "select current_database();" -> "spark_42448") + } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org