This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 38b3aca8bab [SPARK-44519][CONNECT] SparkConnectServerUtils generated incorrect parameters for jars
38b3aca8bab is described below

commit 38b3aca8bab7e18365417f3e0a0e441baaa0ddc8
Author: Jiaan Geng <belie...@163.com>
AuthorDate: Mon Jul 24 19:39:03 2023 +0800

    [SPARK-44519][CONNECT] SparkConnectServerUtils generated incorrect parameters for jars
    
    ### What changes were proposed in this pull request?
    https://github.com/apache/spark/pull/41932 tried to add a test case for Connect, and we then found that the Maven build failed because of the bug discussed at https://github.com/apache/spark/pull/41932#discussion_r1266118099.
    
    After some discussion, cloud-fan and zhengruifeng suggested ignoring the test case for Connect. So I submitted this PR to fix the bug.
    
    ### Why are the changes needed?
    Fix the bug where `SparkConnectServerUtils` generated incorrect `--jars` parameters for the test jars: each jar was passed with its own `--jars` flag instead of all jars being joined into one comma-separated list.
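    
    Concretely, `spark-submit` reads `--jars` as one comma-separated list, and a repeated flag overrides the earlier occurrence, so the catalyst test jar added first was effectively dropped. A minimal sketch of that failure mode (the `JarsFlagSketch` object, its `parse` helper, and the jar names are illustrative, not code from this patch):
    
    ```scala
    // Hypothetical last-wins option parsing, mirroring how a repeated
    // spark-submit flag overwrites the value set by earlier occurrences.
    object JarsFlagSketch {
      def parse(args: Seq[String]): Map[String, String] =
        // toMap keeps only the last value for a duplicated key.
        args.grouped(2).collect { case Seq(flag, value) => flag -> value }.toMap
    
      def main(args: Array[String]): Unit = {
        // Before the fix: one "--jars" flag per jar, so the first list is lost.
        val buggy = Seq(
          "--jars", "spark-catalyst-tests.jar",
          "--jars", "spark-connect-client-jvm-tests.jar")
        println(parse(buggy)("--jars")) // spark-connect-client-jvm-tests.jar
    
        // After the fix: one comma-separated "--jars" list keeps both jars.
        val fixed = Seq(
          "--jars", "spark-catalyst-tests.jar,spark-connect-client-jvm-tests.jar")
        println(parse(fixed)("--jars")) // both jars survive
      }
    }
    ```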
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    It just updates the internal implementation.
    
    ### How was this patch tested?
    N/A
    
    Closes #42121 from beliefer/SPARK-44519.
    
    Authored-by: Jiaan Geng <belie...@163.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
    (cherry picked from commit 4644344f443f2f6ab26a72b44b9219fb6c82d26e)
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 .../connect/client/util/RemoteSparkSession.scala   | 48 +++++++++++-----------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index 8d84dffc9d5..594d3c369fe 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -93,19 +93,30 @@ object SparkConnectServerUtils {
    * configs, we add them here
    */
   private def testConfigs: Seq[String] = {
+    // To find InMemoryTableCatalog for V2 writer tests
+    val catalystTestJar =
+      tryFindJar("sql/catalyst", "spark-catalyst", "spark-catalyst", test = true)
+        .map(clientTestJar => Seq(clientTestJar.getCanonicalPath))
+        .getOrElse(Seq.empty)
+
+    // For UDF maven E2E tests, the server needs the client code to find the UDFs defined in tests.
+    val connectClientTestJar = tryFindJar(
+      "connector/connect/client/jvm",
+      // SBT passes the client & test jars to the server process automatically.
+      // So we skip building or finding this jar for SBT.
+      "sbt-tests-do-not-need-this-jar",
+      "spark-connect-client-jvm",
+      test = true)
+      .map(clientTestJar => Seq(clientTestJar.getCanonicalPath))
+      .getOrElse(Seq.empty)
+
+    val allJars = catalystTestJar ++ connectClientTestJar
+    val jarsConfigs = Seq("--jars", allJars.mkString(","))
+
     // Use InMemoryTableCatalog for V2 writer tests
-    val writerV2Configs = {
-      val catalystTestJar = findJar( // To find InMemoryTableCatalog for V2 writer tests
-        "sql/catalyst",
-        "spark-catalyst",
-        "spark-catalyst",
-        test = true).getCanonicalPath
-      Seq(
-        "--jars",
-        catalystTestJar,
-        "--conf",
-        "spark.sql.catalog.testcat=org.apache.spark.sql.connector.catalog.InMemoryTableCatalog")
-    }
+    val writerV2Configs = Seq(
+      "--conf",
+      "spark.sql.catalog.testcat=org.apache.spark.sql.connector.catalog.InMemoryTableCatalog")
 
     // Run tests using hive
     val hiveTestConfigs = {
@@ -128,18 +139,7 @@ object SparkConnectServerUtils {
       Seq("--conf", s"spark.sql.catalogImplementation=$catalogImplementation")
     }
 
-    // For UDF maven E2E tests, the server needs the client code to find the UDFs defined in tests.
-    val udfTestConfigs = tryFindJar(
-      "connector/connect/client/jvm",
-      // SBT passes the client & test jars to the server process automatically.
-      // So we skip building or finding this jar for SBT.
-      "sbt-tests-do-not-need-this-jar",
-      "spark-connect-client-jvm",
-      test = true)
-      .map(clientTestJar => Seq("--jars", clientTestJar.getCanonicalPath))
-      .getOrElse(Seq.empty)
-
-    writerV2Configs ++ hiveTestConfigs ++ udfTestConfigs
+    jarsConfigs ++ writerV2Configs ++ hiveTestConfigs
   }
 
   def start(): Unit = {
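
With this change the jar paths are collected first and emitted under a single `--jars` option, and the catalyst jar lookup switches from `findJar` to `tryFindJar`, so a missing test jar degrades to an empty list instead of an error. A rough sketch of the argument list `testConfigs` now produces, with placeholder jar paths and assuming the hive test jar is found:

```scala
// Approximate shape of the options returned by testConfigs after the fix
// (jar paths are placeholders; the real paths come from tryFindJar):
Seq(
  "--jars", "spark-catalyst-tests.jar,spark-connect-client-jvm-tests.jar",
  "--conf", "spark.sql.catalog.testcat=" +
    "org.apache.spark.sql.connector.catalog.InMemoryTableCatalog",
  "--conf", "spark.sql.catalogImplementation=hive")
```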


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
