This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 54d3c2e945e8 [SPARK-51800][INFRA] Set up the CI for UDS in PySpark
54d3c2e945e8 is described below

commit 54d3c2e945e84c31fa35a0fdbc7987eb8e2437d3
Author: Hyukjin Kwon <[email protected]>
AuthorDate: Tue Apr 15 13:20:15 2025 +0900

    [SPARK-51800][INFRA] Set up the CI for UDS in PySpark
    
    ### What changes were proposed in this pull request?
    
    This PR is a followup of https://github.com/apache/spark/pull/50466 that sets the CI for UDS in Python.
    
    ### Why are the changes needed?
    
    It might be easy to make a mistake in the codebase that causes flakiness. We should refactor a bit as well to prevent this.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Setting the CI for it. Will monitor the build.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #50585 from HyukjinKwon/SPARK-51800.
    
    Authored-by: Hyukjin Kwon <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 .github/workflows/build_uds.yml                    | 53 ++++++++++++++++++++++
 .../org/apache/spark/internal/config/Python.scala  |  2 +-
 2 files changed, 54 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/build_uds.yml b/.github/workflows/build_uds.yml
new file mode 100644
index 000000000000..29aadcecf6d9
--- /dev/null
+++ b/.github/workflows/build_uds.yml
@@ -0,0 +1,53 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+name: "Build / Unix Domain Socket (master, Hadoop 3, JDK 17, Scala 2.13)"
+
+on:
+  schedule:
+    - cron: '0 1 */3 * *'
+  workflow_dispatch:
+
+jobs:
+  run-build:
+    permissions:
+      packages: write
+    name: Run
+    uses: ./.github/workflows/build_and_test.yml
+    if: github.repository == 'apache/spark'
+    with:
+      java: 17
+      branch: master
+      hadoop: hadoop3
+      envs: >-
+        {
+          "PYSPARK_IMAGE_TO_TEST": "python-311",
+          "PYTHON_TO_TEST": "python3.11",
+          "PYSPARK_UDS_MODE": "true"
+        }
+      jobs: >-
+        {
+          "build": "true",
+          "docs": "true",
+          "pyspark": "true",
+          "sparkr": "true",
+          "tpcds-1g": "true",
+          "docker-integration-tests": "true",
+          "yarn": "true"
+        }
diff --git a/core/src/main/scala/org/apache/spark/internal/config/Python.scala b/core/src/main/scala/org/apache/spark/internal/config/Python.scala
index 7f9921d58dba..46d54be92f3d 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/Python.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/Python.scala
@@ -77,7 +77,7 @@ private[spark] object Python {
       "to Spark Classic and Spark Connect server.")
     .version("4.1.0")
     .booleanConf
-    .createWithDefault(false)
+    .createWithDefault(sys.env.get("PYSPARK_UDS_MODE").contains("true"))
 
   val PYTHON_UNIX_DOMAIN_SOCKET_DIR = ConfigBuilder("spark.python.unix.domain.socket.dir")
     .doc("When specified, it uses the directory to create Unix domain socket files. " +


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to