This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new cff7baef736d [SPARK-53526][SQL] Enable SQL scripting by default
cff7baef736d is described below
commit cff7baef736dda3f8be700841515e57850e71b5d
Author: Dušan Tišma <[email protected]>
AuthorDate: Tue Sep 9 11:29:56 2025 +0800
[SPARK-53526][SQL] Enable SQL scripting by default
### What changes were proposed in this pull request?
This PR changes default value of `spark.sql.scripting.enabled` in order to
enable SQL scripting by default.
### Why are the changes needed?
### Does this PR introduce _any_ user-facing change?
Yes, the default value of `spark.sql.scripting.enabled` is changed.
### How was this patch tested?
Existing tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #52272 from dusantism-db/enable-sql-scripting-by-default.
Authored-by: Dušan Tišma <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
.../scala/org/apache/spark/sql/internal/SQLConf.scala | 2 +-
.../sql/catalyst/parser/SqlScriptingParserSuite.scala | 12 ------------
.../sql/execution/ExecuteImmediateEndToEndSuite.scala | 17 +++++++----------
.../spark/sql/scripting/SqlScriptingE2eSuite.scala | 1 -
.../sql/scripting/SqlScriptingExecutionSuite.scala | 1 -
.../sql/scripting/SqlScriptingInterpreterSuite.scala | 7 +------
6 files changed, 9 insertions(+), 31 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 12fd80c5a626..5a99814c8cc8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -4142,7 +4142,7 @@ object SQLConf {
"flow and error handling.")
.version("4.0.0")
.booleanConf
- .createWithDefault(false)
+ .createWithDefault(true)
val CONCAT_BINARY_AS_STRING =
buildConf("spark.sql.function.concatBinaryAsString")
.doc("When this option is set to false and all inputs are binary, `functions.concat` returns " +
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala
index abcea96f0831..9902374ce8e9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala
@@ -23,22 +23,10 @@ import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.catalyst.plans.logical.{CompoundBody,
CreateVariable, ExceptionHandler, ForStatement, IfElseStatement,
IterateStatement, LeaveStatement, LoopStatement, Project, RepeatStatement,
SearchedCaseStatement, SetVariable, SimpleCaseStatement, SingleStatement,
WhileStatement}
import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
import org.apache.spark.sql.exceptions.SqlScriptingException
-import org.apache.spark.sql.internal.SQLConf
class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
import CatalystSqlParser._
- // Tests setup
- protected override def beforeAll(): Unit = {
- super.beforeAll()
- conf.setConfString(SQLConf.SQL_SCRIPTING_ENABLED.key, "true")
- }
-
- protected override def afterAll(): Unit = {
- conf.unsetConf(SQLConf.SQL_SCRIPTING_ENABLED.key)
- super.afterAll()
- }
-
// Tests
test("single select") {
val sqlScriptText = "SELECT 1;"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExecuteImmediateEndToEndSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExecuteImmediateEndToEndSuite.scala
index d8c24c1ac397..c252047b3abe 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExecuteImmediateEndToEndSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExecuteImmediateEndToEndSuite.scala
@@ -17,7 +17,6 @@
package org.apache.spark.sql.execution
import org.apache.spark.sql.{AnalysisException, QueryTest}
-import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
class ExecuteImmediateEndToEndSuite extends QueryTest with SharedSparkSession {
@@ -39,14 +38,12 @@ class ExecuteImmediateEndToEndSuite extends QueryTest with SharedSparkSession {
}
test("SQL Scripting not supported inside EXECUTE IMMEDIATE") {
- withSQLConf(SQLConf.SQL_SCRIPTING_ENABLED.key -> "true") {
- val executeImmediateText = "EXECUTE IMMEDIATE 'BEGIN SELECT 1; END'"
- checkError(
- exception = intercept[AnalysisException ] {
- spark.sql(executeImmediateText)
- },
- condition = "SQL_SCRIPT_IN_EXECUTE_IMMEDIATE",
- parameters = Map("sqlString" -> "BEGIN SELECT 1; END"))
- }
+ val executeImmediateText = "EXECUTE IMMEDIATE 'BEGIN SELECT 1; END'"
+ checkError(
+ exception = intercept[AnalysisException ] {
+ spark.sql(executeImmediateText)
+ },
+ condition = "SQL_SCRIPT_IN_EXECUTE_IMMEDIATE",
+ parameters = Map("sqlString" -> "BEGIN SELECT 1; END"))
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingE2eSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingE2eSuite.scala
index 67305019e70d..e24407912eb0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingE2eSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingE2eSuite.scala
@@ -58,7 +58,6 @@ class SqlScriptingE2eSuite extends QueryTest with SharedSparkSession {
override protected def sparkConf: SparkConf = {
super.sparkConf
.set(SQLConf.ANSI_ENABLED.key, "true")
- .set(SQLConf.SQL_SCRIPTING_ENABLED.key, "true")
}
// Tests
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
index 0be9e5f0f578..7e6de2b990ff 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
@@ -40,7 +40,6 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession {
override protected def sparkConf: SparkConf = {
super.sparkConf
.set(SQLConf.ANSI_ENABLED.key, "true")
- .set(SQLConf.SQL_SCRIPTING_ENABLED.key, "true")
}
// Helpers
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
index 0eafb1fbce2c..684a5a72e6d8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.scripting
-import org.apache.spark.{SparkConf, SparkException, SparkNumberFormatException}
+import org.apache.spark.{SparkException, SparkNumberFormatException}
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.QueryPlanningTracker
import org.apache.spark.sql.catalyst.expressions.Expression
@@ -38,11 +38,6 @@ class SqlScriptingInterpreterSuite
with SharedSparkSession
with SqlScriptingTestUtils {
- // Tests setup
- override protected def sparkConf: SparkConf = {
- super.sparkConf.set(SQLConf.SQL_SCRIPTING_ENABLED.key, "true")
- }
-
// Helpers
private def runSqlScript(
sqlText: String,
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]