This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8c601ea2cf6 [SPARK-43449][INFRA] Remove branch-3.2 daily GitHub Action job and conditions
8c601ea2cf6 is described below

commit 8c601ea2cf6a89e9e879f65ad8ab9ba96f73c616
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Thu May 11 02:38:27 2023 -0700

    [SPARK-43449][INFRA] Remove branch-3.2 daily GitHub Action job and conditions
    
    ### What changes were proposed in this pull request?
    
    This PR aims to do the following.
    - Remove the daily GitHub Action job on branch-3.2 to save community resources
      - https://github.com/apache/spark/actions/workflows/build_branch32.yml
    - Simplify `build_and_test.yml` by removing `branch-3.2` specific code.
    
    ### Why are the changes needed?
    
    Apache Spark 3.2 is EOL.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    Closes #41134 from dongjoon-hyun/SPARK-43449.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .github/workflows/build_and_test.yml | 19 ++++++--------
 .github/workflows/build_branch32.yml | 49 ------------------------------------
 2 files changed, 8 insertions(+), 60 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index d3b634ffa26..4aff1bc9753 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -59,7 +59,7 @@ jobs:
       required: ${{ steps.set-outputs.outputs.required }}
       image_url: >-
         ${{
-          ((inputs.branch == 'branch-3.2' || inputs.branch == 'branch-3.3') && 'dongjoon/apache-spark-github-action-image:20220207')
+          (inputs.branch == 'branch-3.3' && 'dongjoon/apache-spark-github-action-image:20220207')
           || steps.infra-image-outputs.outputs.image_url
         }}
     steps:
@@ -80,15 +80,12 @@ jobs:
       id: set-outputs
       run: |
         if [ -z "${{ inputs.jobs }}" ]; then
-          # is-changed.py is missing in branch-3.2, and it might run in scheduled build, see also SPARK-39517
           pyspark=true; sparkr=true; tpcds=true; docker=true;
-          if [ -f "./dev/is-changed.py" ]; then
-            pyspark_modules=`cd dev && python -c "import sparktestsupport.modules as m; print(','.join(m.name for m in m.all_modules if m.name.startswith('pyspark')))"`
-            pyspark=`./dev/is-changed.py -m $pyspark_modules`
-            sparkr=`./dev/is-changed.py -m sparkr`
-            tpcds=`./dev/is-changed.py -m sql`
-            docker=`./dev/is-changed.py -m docker-integration-tests`
-          fi
+          pyspark_modules=`cd dev && python -c "import sparktestsupport.modules as m; print(','.join(m.name for m in m.all_modules if m.name.startswith('pyspark')))"`
+          pyspark=`./dev/is-changed.py -m $pyspark_modules`
+          sparkr=`./dev/is-changed.py -m sparkr`
+          tpcds=`./dev/is-changed.py -m sql`
+          docker=`./dev/is-changed.py -m docker-integration-tests`
           # 'build', 'scala-213', and 'java-11-17' are always true for now.
           # It does not save significant time and most of PRs trigger the build.
           precondition="
@@ -278,7 +275,7 @@ jobs:
       (fromJson(needs.precondition.outputs.required).pyspark == 'true' ||
       fromJson(needs.precondition.outputs.required).lint == 'true' ||
       fromJson(needs.precondition.outputs.required).sparkr == 'true') &&
-      (inputs.branch != 'branch-3.2' && inputs.branch != 'branch-3.3')
+      (inputs.branch != 'branch-3.3')
     runs-on: ubuntu-latest
     permissions:
       packages: write
@@ -602,7 +599,7 @@ jobs:
     - name: Java linter
       run: ./dev/lint-java
     - name: Spark connect jvm client mima check
-      if: inputs.branch != 'branch-3.2' && inputs.branch != 'branch-3.3'
+      if: inputs.branch != 'branch-3.3'
       run: ./dev/connect-jvm-client-mima-check
     - name: Install Python linter dependencies
       run: |
diff --git a/.github/workflows/build_branch32.yml b/.github/workflows/build_branch32.yml
deleted file mode 100644
index 723db45ca37..00000000000
--- a/.github/workflows/build_branch32.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-name: "Build (branch-3.2, Scala 2.13, Hadoop 3, JDK 8)"
-
-on:
-  schedule:
-    - cron: '0 4 * * *'
-
-jobs:
-  run-build:
-    permissions:
-      packages: write
-    name: Run
-    uses: ./.github/workflows/build_and_test.yml
-    if: github.repository == 'apache/spark'
-    with:
-      java: 8
-      branch: branch-3.2
-      hadoop: hadoop3.2
-      envs: >-
-        {
-          "SCALA_PROFILE": "scala2.13"
-        }
-      # TODO(SPARK-39712): Reenable "sparkr": "true"
-      # TODO(SPARK-39685): Reenable "lint": "true"
-      # TODO(SPARK-39681): Reenable "pyspark": "true"
-      # TODO(SPARK-39682): Reenable "docker-integration-tests": "true"
-      jobs: >-
-        {
-          "build": "true",
-          "tpcds-1g": "true"
-        }
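
For reference, after this change the precondition step in build_and_test.yml always invokes dev/is-changed.py instead of guarding on branch-3.2. Below is a minimal local sketch of the same commands, assuming the current directory is the root of a Spark checkout; the backtick-quoted Python one-liner is taken verbatim from the hunk above.

    # Collect the names of all pyspark test modules, then ask dev/is-changed.py
    # whether the current change touches each area (it prints true or false,
    # which the workflow stores in the pyspark/sparkr/tpcds/docker flags).
    pyspark_modules=`cd dev && python -c "import sparktestsupport.modules as m; print(','.join(m.name for m in m.all_modules if m.name.startswith('pyspark')))"`
    ./dev/is-changed.py -m $pyspark_modules
    ./dev/is-changed.py -m sparkr
    ./dev/is-changed.py -m sql
    ./dev/is-changed.py -m docker-integration-tests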

