This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cd789acb5e51 [SPARK-48088][PYTHON][CONNECT][TESTS] Prepare backward compatibility test 4.0 <> above
cd789acb5e51 is described below

commit cd789acb5e51172e43052b59c4b610e64f380a16
Author: Hyukjin Kwon <gurwls...@apache.org>
AuthorDate: Fri May 3 01:08:05 2024 -0700

    [SPARK-48088][PYTHON][CONNECT][TESTS] Prepare backward compatibility test 4.0 <> above
    
    ### What changes were proposed in this pull request?
    
    This PR forward ports https://github.com/apache/spark/pull/46334 to reduce conflicts.
    
    ### Why are the changes needed?
    
    To reduce conflicts against branch-3.5, and to prepare the 4.0 <> above compatibility test.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, dev-only.
    
    ### How was this patch tested?
    
    The CI in this PR should verify the changes.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #46358 from HyukjinKwon/SPARK-48088-40.
    
    Authored-by: Hyukjin Kwon <gurwls...@apache.org>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 python/pyspark/util.py |  3 +++
 python/run-tests.py    | 18 +++++++++++-------
 2 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/python/pyspark/util.py b/python/pyspark/util.py
index bf1cf5b59553..f0fa4a2413ce 100644
--- a/python/pyspark/util.py
+++ b/python/pyspark/util.py
@@ -747,6 +747,9 @@ def is_remote_only() -> bool:
     """
     global _is_remote_only
 
+    if "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ:
+        return True
+
     if _is_remote_only is not None:
         return _is_remote_only
     try:
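
For illustration, the new check makes the environment variable a hard
override: its mere presence short-circuits the function. A minimal usage
sketch, assuming the variable is set before the call (the value itself is
never inspected):

    import os

    # Presence alone triggers the short-circuit; the value is ignored.
    os.environ["SPARK_SKIP_CONNECT_COMPAT_TESTS"] = "1"

    from pyspark.util import is_remote_only

    # Returns True immediately, before the cached _is_remote_only value
    # or the import probe below it is consulted.
    assert is_remote_only()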
diff --git a/python/run-tests.py b/python/run-tests.py
index ebdd4a9a2179..64ac48e210db 100755
--- a/python/run-tests.py
+++ b/python/run-tests.py
@@ -62,13 +62,15 @@ LOGGER = logging.getLogger()
 
 # Find out where the assembly jars are located.
 # TODO: revisit for Scala 2.13
-for scala in ["2.13"]:
-    build_dir = os.path.join(SPARK_HOME, "assembly", "target", "scala-" + scala)
-    if os.path.isdir(build_dir):
-        SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
-        break
-else:
-    raise RuntimeError("Cannot find assembly build directory, please build Spark first.")
+SPARK_DIST_CLASSPATH = ""
+if "SPARK_SKIP_CONNECT_COMPAT_TESTS" not in os.environ:
+    for scala in ["2.13"]:
+        build_dir = os.path.join(SPARK_HOME, "assembly", "target", "scala-" + scala)
+        if os.path.isdir(build_dir):
+            SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
+            break
+    else:
+        raise RuntimeError("Cannot find assembly build directory, please build Spark first.")
 
 
 def run_individual_python_test(target_dir, test_name, pyspark_python, keep_test_output):
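
One subtlety in the rewritten block is Python's for/else: the else clause
runs only when the loop completes without hitting break. A minimal
standalone sketch of the pattern, with a hypothetical gate variable and
directory names standing in for the real ones:

    import os

    found = ""
    if "SKIP_LOOKUP" not in os.environ:  # hypothetical gate, mirroring the diff
        for version in ["2.13"]:
            candidate = "scala-" + version
            if os.path.isdir(candidate):
                found = candidate
                break
        else:
            # Reached only when no break fired, i.e. no directory existed.
            raise RuntimeError("no build directory found")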
@@ -100,6 +102,8 @@ def run_individual_python_test(target_dir, test_name, pyspark_python, keep_test_
 
     if "SPARK_CONNECT_TESTING_REMOTE" in os.environ:
         env.update({"SPARK_CONNECT_TESTING_REMOTE": 
os.environ["SPARK_CONNECT_TESTING_REMOTE"]})
+    if "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ:
+        env.update({"SPARK_SKIP_JVM_REQUIRED_TESTS": os.environ["SPARK_SKIP_CONNECT_COMPAT_TESTS"]})
 
     # Create a unique temp directory under 'target/' for each run. The TMPDIR variable is
     # recognized by the tempfile module to override the default system temp directory.
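
The second hunk re-exports the skip flag into the per-test environment
under the name the individual tests check, SPARK_SKIP_JVM_REQUIRED_TESTS.
A hedged sketch of how such an env dict reaches a spawned test process;
the subprocess invocation is illustrative, not the runner's actual
command line:

    import os
    import subprocess

    env = dict(os.environ)
    if "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ:
        # Forward the flag under the name the tests themselves look for.
        env["SPARK_SKIP_JVM_REQUIRED_TESTS"] = os.environ["SPARK_SKIP_CONNECT_COMPAT_TESTS"]

    # Illustrative invocation; run-tests.py constructs its own command.
    subprocess.run(["python", "-m", "pytest", "some_test.py"], env=env, check=True)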


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
