This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-4.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.1 by this push:
new e3b4066acd73 [SPARK-54679][SQL] Rename
`spark.sql.(xml.legacyXMLParser.enabled -> legacy.useLegacyXMLParser)`
e3b4066acd73 is described below
commit e3b4066acd7321335f95dac9191a42abc55e760a
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Dec 10 18:02:08 2025 -0800
[SPARK-54679][SQL] Rename `spark.sql.(xml.legacyXMLParser.enabled ->
legacy.useLegacyXMLParser)`
### What changes were proposed in this pull request?
This PR aims to use `spark.sql.legacy` namespace instead of adding a new
`spark.sql.xml` namespace for a single legacy behavior.
### Why are the changes needed?
To simplify, by reusing the existing `spark.sql.legacy` namespace.
### Does this PR introduce _any_ user-facing change?
No. This configuration is added at Apache Spark 4.1.0 via the following.
- https://github.com/apache/spark/pull/51287
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #53433 from dongjoon-hyun/SPARK-54679.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
(cherry picked from commit 2eabf4d08b1561d67d9526e0400a09a34d8a58df)
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | 2 +-
.../spark/sql/execution/datasources/xml/XmlInferSchemaSuite.scala | 2 +-
.../spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala | 3 ++-
.../org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala | 2 +-
.../apache/spark/sql/execution/datasources/xml/XmlVariantSuite.scala | 2 +-
5 files changed, 6 insertions(+), 5 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index a4cee92e38c2..c4c46bbe67f1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -6590,7 +6590,7 @@ object SQLConf {
.createWithDefault(false)
val LEGACY_XML_PARSER_ENABLED = {
- buildConf("spark.sql.xml.legacyXMLParser.enabled")
+ buildConf("spark.sql.legacy.useLegacyXMLParser")
.internal()
.doc(
"When set to true, use the legacy XML parser for parsing XML files. " +
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlInferSchemaSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlInferSchemaSuite.scala
index 9f36cb3ad2cd..05e5d6d17628 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlInferSchemaSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlInferSchemaSuite.scala
@@ -46,7 +46,7 @@ class XmlInferSchemaSuite
protected val legacyParserEnabled: Boolean = false
override protected def sparkConf: SparkConf = super.sparkConf
- .set("spark.sql.xml.legacyXMLParser.enabled", legacyParserEnabled.toString)
+ .set(SQLConf.LEGACY_XML_PARSER_ENABLED, legacyParserEnabled)
private val baseOptions = Map("rowTag" -> "ROW")
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
index 22bb66daf313..2a5a36c74a28 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
@@ -21,6 +21,7 @@ import org.scalatest.matchers.should.Matchers
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.internal.SQLConf
/**
* Tests various cases of partition size, compression.
@@ -33,7 +34,7 @@ class XmlPartitioningSuite extends SparkFunSuite with
Matchers with BeforeAndAft
.master("local[2]")
.appName("XmlPartitioningSuite")
.config("spark.hadoop.fs.local.block.size", blockSize)
- .config("spark.sql.xml.legacyXMLParser.enabled", legacyParserEnabled)
+ .config(SQLConf.LEGACY_XML_PARSER_ENABLED.key, legacyParserEnabled)
.getOrCreate()
try {
val fileName = s"test-data/xml-resources/fias_house${if (large) ".large"
else ""}.xml$suffix"
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala
index ce5194864c10..06845cdf8120 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala
@@ -63,7 +63,7 @@ class XmlSuite
protected val legacyParserEnabled: Boolean = false
override protected def sparkConf: SparkConf = super.sparkConf
- .set("spark.sql.xml.legacyXMLParser.enabled", legacyParserEnabled.toString)
+ .set(SQLConf.LEGACY_XML_PARSER_ENABLED, legacyParserEnabled)
protected val resDir = "test-data/xml-resources/"
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlVariantSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlVariantSuite.scala
index 15713e759b81..5738cd2a9927 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlVariantSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlVariantSuite.scala
@@ -34,7 +34,7 @@ class XmlVariantSuite extends QueryTest with
SharedSparkSession with TestXmlData
protected val legacyParserEnabled: Boolean = false
override protected def sparkConf: SparkConf = super.sparkConf
- .set("spark.sql.xml.legacyXMLParser.enabled", legacyParserEnabled.toString)
+ .set(SQLConf.LEGACY_XML_PARSER_ENABLED, legacyParserEnabled)
private val baseOptions = Map("rowTag" -> "ROW", "valueTag" -> "_VALUE",
"attributePrefix" -> "_")
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]