This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 687544988c2 [SPARK-42587][CONNECT][TESTS] Use wrapper versions for SBT and Maven in `connect` module tests
687544988c2 is described below

commit 687544988c21395c299693c2da9de422e1140abf
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun Feb 26 18:34:45 2023 -0800

    [SPARK-42587][CONNECT][TESTS] Use wrapper versions for SBT and Maven in `connect` module tests
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use the wrapper versions of SBT and Maven (`build/sbt` and `build/mvn`) in the `connect` test module's exception messages and comments.
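    
    The commands referenced by the updated docs and error messages (all quoted verbatim from the diff below) are, for example:
    
        build/sbt package
        build/mvn clean install -DskipTests
        build/sbt "testOnly org.apache.spark.sql.connect.client.CompatibilitySuite"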
    
    ### Why are the changes needed?
    
    To clarify the versions we use.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    Closes #40180 from dongjoon-hyun/SPARK-42587.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
    (cherry picked from commit a6a90feb4be891375cefbd7bbc75078e297ed008)
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../org/apache/spark/sql/connect/client/CompatibilitySuite.scala   | 7 ++++---
 .../spark/sql/connect/client/util/IntegrationTestUtils.scala       | 7 ++++---
 .../apache/spark/sql/connect/client/util/RemoteSparkSession.scala  | 3 ++-
 3 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
index ad91accd5da..35cecaa20d7 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
@@ -39,14 +39,15 @@ import org.apache.spark.sql.connect.client.util.IntegrationTestUtils._
  *     spark-sql
  *     spark-connect-client-jvm
  * }}}
- * To build the above artifact, use e.g. `sbt package` or `mvn clean install -DskipTests`.
+ * To build the above artifact, use e.g. `build/sbt package` or
+ * `build/mvn clean install -DskipTests`.
  *
 * When debugging this test, if any changes to the client API, the client jar need to be built
  * before running the test. An example workflow with SBT for this test:
  *   1. Compatibility test has reported an unexpected client API change.
  *   1. Fix the wrong client API.
- *   1. Build the client jar: `sbt package`
- *   1. Run the test again: `sbt "testOnly
+ *   1. Build the client jar: `build/sbt package`
+ *   1. Run the test again: `build/sbt "testOnly
  *      org.apache.spark.sql.connect.client.CompatibilitySuite"`
  */
 class CompatibilitySuite extends ConnectFunSuite {
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
index 2725422c299..6c465c83b08 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
@@ -40,8 +40,9 @@ object IntegrationTestUtils {
   private[connect] def debug(error: Throwable): Unit = if (isDebug) error.printStackTrace()
 
   /**
-   * Find a jar in the Spark project artifacts. It requires a build first (e.g. sbt package, mvn
-   * clean install -DskipTests) so that this method can find the jar in the target folders.
+   * Find a jar in the Spark project artifacts. It requires a build first (e.g. build/sbt package,
+   * build/mvn clean install -DskipTests) so that this method can find the jar in the target
+   * folders.
    *
    * @return
    *   the jar
@@ -52,7 +53,7 @@ object IntegrationTestUtils {
       targetDir.exists(),
       s"Fail to locate the target folder: '${targetDir.getCanonicalPath}'. " +
         s"SPARK_HOME='${new File(sparkHome).getCanonicalPath}'. " +
-        "Make sure the spark project jars has been built (e.g. using sbt 
package)" +
+        "Make sure the spark project jars has been built (e.g. using build/sbt 
package)" +
         "and the env variable `SPARK_HOME` is set correctly.")
     val jars = recursiveListFiles(targetDir).filter { f =>
       // SBT jar
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index 2d8d9b02d4f..8f91ad31764 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -32,7 +32,8 @@ import org.apache.spark.util.Utils
 
 /**
 * An util class to start a local spark connect server in a different process for local E2E tests.
- * Pre-running the tests, the spark connect artifact needs to be built using e.g. `sbt package`.
+ * Pre-running the tests, the spark connect artifact needs to be built using e.g.
+ * `build/sbt package`.
 * It is designed to start the server once but shared by all tests. It is equivalent to use the
  * following command to start the connect server via command line:
  *
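
For context, the `IntegrationTestUtils` scaladoc above describes a jar lookup that only succeeds after a `build/sbt package` or `build/mvn clean install -DskipTests` run. A minimal Scala sketch of that pattern (illustrative only; `findJar` and its parameters here are hypothetical, not the suite's actual API):

    import java.io.File

    object JarLookupSketch {
      // Recursively collect all files under a directory (mirrors the
      // `recursiveListFiles` helper referenced in the hunk above).
      private def recursiveListFiles(dir: File): Seq[File] = {
        val entries = Option(dir.listFiles()).toSeq.flatten
        entries.filter(_.isFile) ++
          entries.filter(_.isDirectory).flatMap(recursiveListFiles)
      }

      // Look up a built jar under <sparkHome>/<module>/target, failing with the
      // same kind of hint the real suite prints when the build step was skipped.
      def findJar(sparkHome: String, module: String, prefix: String): File = {
        val targetDir = new File(new File(sparkHome, module), "target")
        assert(
          targetDir.exists(),
          s"Fail to locate the target folder: '${targetDir.getCanonicalPath}'. " +
            "Make sure the jars have been built (e.g. using build/sbt package) " +
            "and the env variable `SPARK_HOME` is set correctly.")
        recursiveListFiles(targetDir)
          .find(f => f.getName.startsWith(prefix) && f.getName.endsWith(".jar"))
          .getOrElse(sys.error(s"No jar starting with '$prefix' under $targetDir"))
      }
    }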


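Similarly, `RemoteSparkSession` above is documented as starting the connect server once, in a separate process, shared by all tests. A sketch of that start-once pattern (the real launch command is elided in the hunk; the script path below is a hypothetical placeholder):

    import scala.sys.process._

    object SharedServerSketch {
      // Hypothetical placeholder; the actual launch command is not shown above.
      private val serverCommand = Seq("/path/to/start-server.sh")

      // `lazy val` gives start-once semantics: the first test that touches
      // `server` launches the process; later tests reuse the same instance.
      lazy val server: Process = serverCommand.run()

      // Tear the shared process down after the whole suite finishes.
      def stop(): Unit = server.destroy()
    }
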
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
