This is an automated email from the ASF dual-hosted git repository.

ulyssesyou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 09974d3a4 [CORE] Fix gluten createOptional config contains Some (#5573)
09974d3a4 is described below

commit 09974d3a46278759488a737ad8d929d2a31559dc
Author: Xiduo You <ulyssesyo...@gmail.com>
AuthorDate: Tue Apr 30 14:15:00 2024 +0800

    [CORE] Fix gluten createOptional config contains Some (#5573)
---
 .../sql/execution/VeloxParquetReadSuite.scala      |  8 +++++++
 .../spark/sql/internal/GlutenConfigUtil.scala      | 26 +++++++++++++++++-----
 2 files changed, 29 insertions(+), 5 deletions(-)

diff --git a/backends-velox/src/test/scala/org/apache/spark/sql/execution/VeloxParquetReadSuite.scala b/backends-velox/src/test/scala/org/apache/spark/sql/execution/VeloxParquetReadSuite.scala
index d5828c738..cb3eeaec6 100644
--- a/backends-velox/src/test/scala/org/apache/spark/sql/execution/VeloxParquetReadSuite.scala
+++ b/backends-velox/src/test/scala/org/apache/spark/sql/execution/VeloxParquetReadSuite.scala
@@ -16,14 +16,22 @@
  */
 package org.apache.spark.sql.execution
 
+import org.apache.gluten.GlutenConfig
 import org.apache.gluten.execution.{BasicScanExecTransformer, VeloxWholeStageTransformerSuite}
 
+import org.apache.spark.SparkConf
+
 import java.io.File
 
 class VeloxParquetReadSuite extends VeloxWholeStageTransformerSuite {
   override protected val resourcePath: String = "/parquet-for-read"
   override protected val fileFormat: String = "parquet"
 
+  override protected def sparkConf: SparkConf = {
+    super.sparkConf
+      .set(GlutenConfig.LOAD_QUANTUM.key, "128m")
+  }
+
   testWithSpecifiedSparkVersion("read example parquet files", Some("3.5"), Some("3.5")) {
     withTable("test_table") {
       val dir = new File(getClass.getResource(resourcePath).getFile)
diff --git a/shims/common/src/main/scala/org/apache/spark/sql/internal/GlutenConfigUtil.scala b/shims/common/src/main/scala/org/apache/spark/sql/internal/GlutenConfigUtil.scala
index babb446d0..1a45572ac 100644
--- a/shims/common/src/main/scala/org/apache/spark/sql/internal/GlutenConfigUtil.scala
+++ b/shims/common/src/main/scala/org/apache/spark/sql/internal/GlutenConfigUtil.scala
@@ -21,11 +21,27 @@ import org.apache.spark.internal.config.ConfigReader
 import scala.collection.JavaConverters._
 
 object GlutenConfigUtil {
+  private def getConfString(reader: ConfigReader, key: String, value: String): String = {
+    Option(SQLConf.getConfigEntry(key))
+      .map {
+        _.readFrom(reader) match {
+          case o: Option[_] => o.map(_.toString).getOrElse(value)
+          case null => value
+          case v => v.toString
+        }
+      }
+      .getOrElse(value)
+  }
+
   def parseConfig(conf: Map[String, String]): Map[String, String] = {
-    val reader = new ConfigReader(conf.filter(_._1.contains("spark.gluten.")).asJava)
-    val glutenConfigEntries =
-      SQLConf.getConfigEntries().asScala.filter(e => e.key.contains("spark.gluten."))
-    val glutenConfig = glutenConfigEntries.map(e => (e.key, e.readFrom(reader).toString)).toMap
-    conf.map(e => (e._1, glutenConfig.getOrElse(e._1, e._2)))
+    val reader = new ConfigReader(conf.filter(_._1.startsWith("spark.gluten.")).asJava)
+    conf.map {
+      case (k, v) =>
+        if (k.startsWith("spark.gluten.")) {
+          (k, getConfString(reader, k, v))
+        } else {
+          (k, v)
+        }
+    }.toMap
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@gluten.apache.org
For additional commands, e-mail: commits-h...@gluten.apache.org

Reply via email to