This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 85edd784fc72c1784849e2b122cbf3215f89817c
Author: Matthias Pohl <matthias.p...@aiven.io>
AuthorDate: Sun Dec 10 13:36:57 2023 +0100

    [FLINK-33914][ci] Introduces a basic GitHub Actions workflow
    
    - Adds basic CI workflow
    - Adds composite workflow for starting Flink's CI stages
    - Updates CI helper scripts to support GitHub Actions
    - Adds composite workflow for doing pre-compile checks
    - Adds custom action to enable maven-utils.sh in workflows
    - Adds custom action for initializing a job
---
 .github/actions/job_init/action.yml               |  72 ++++
 .github/actions/run_mvn/action.yml                |  42 +++
 .github/workflows/ci.yml                          |  40 +++
 .github/workflows/template.flink-ci.yml           | 386 ++++++++++++++++++++++
 .github/workflows/template.pre-compile-checks.yml |  77 +++++
 tools/azure-pipelines/create_build_artifact.sh    |  10 +-
 tools/azure-pipelines/debug_files_utils.sh        |  18 +
 tools/azure-pipelines/unpack_build_artifact.sh    |   9 +-
 tools/azure-pipelines/uploading_watchdog.sh       |   2 +
 9 files changed, 653 insertions(+), 3 deletions(-)

diff --git a/.github/actions/job_init/action.yml 
b/.github/actions/job_init/action.yml
new file mode 100644
index 00000000000..5158d855857
--- /dev/null
+++ b/.github/actions/job_init/action.yml
@@ -0,0 +1,72 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+---
+name: "Initializes the job"
+description: "Does all the necessary steps to set up the job"
+inputs:
+  jdk_version:
+    description: "The JDK version that's supposed to be used."
+    required: true
+  maven_repo_folder:
+    description: "The location of the local Maven repository (not setting this 
parameter will omit the caching of Maven artifacts)."
+    required: false
+    default: ""
+  source_directory:
+    description: "Specifies the directory from which the code should be moved 
from (needed for containerized runs; not setting this parameter will omit 
moving the checkout)."
+    required: false
+    default: ""
+  target_directory:
+    description: "Specifies the directory to which the code should be moved to 
(needed for containerized runs; not setting this parameter will omit moving the 
checkout)."
+    required: false
+    default: ""
+runs:
+  using: "composite"
+  steps:
+    - name: "Initializes GHA_PIPELINE_START_TIME with the job's start time"
+      shell: bash
+      run: |
+        job_start_time="$(date --rfc-3339=seconds)"
+        echo "GHA_PIPELINE_START_TIME=${job_start_time}" >> "${GITHUB_ENV}"
+        echo "The job's start time is set to ${job_start_time}."
+
+    - name: "Set JDK version to ${{ inputs.jdk_version }}"
+      shell: bash
+      run: |
+        echo "JAVA_HOME=$JAVA_HOME_${{ inputs.jdk_version }}_X64" >> 
"${GITHUB_ENV}"
+        echo "PATH=$JAVA_HOME_${{ inputs.jdk_version }}_X64/bin:$PATH" >> 
"${GITHUB_ENV}"
+
+    - name: "Setup Maven package cache"
+      if: ${{ inputs.maven_repo_folder != '' }}
+      uses: actions/cache@v4
+      with:
+        path: ${{ inputs.maven_repo_folder }}
+        key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
+        restore-keys: ${{ runner.os }}-maven
+
+    - name: "Moves checkout content from ${{ inputs.source_directory }} to ${{ 
inputs.target_directory }}."
+      if: ${{ inputs.source_directory != '' && inputs.target_directory != '' }}
+      shell: bash
+      run: |
+        mkdir -p ${{ inputs.target_directory }}
+
+        # .scalafmt.conf is needed for Scala format checks
+        # .mvn is needed to make the Maven wrapper accessible in test runs
+        mv ${{ inputs.source_directory }}/* \
+          ${{ inputs.source_directory }}/.scalafmt.conf \
+          ${{ inputs.source_directory }}/.mvn \
+          ${{ inputs.target_directory }}
diff --git a/.github/actions/run_mvn/action.yml 
b/.github/actions/run_mvn/action.yml
new file mode 100644
index 00000000000..03595a25cdb
--- /dev/null
+++ b/.github/actions/run_mvn/action.yml
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+---
+name: "Runs Maven Command"
+description: "Executes Flink's Maven wrapper with the passed Maven parameters."
+inputs:
+  working_directory:
+    description: "The directory under which the Maven command should be 
executed."
+    default: "${{ github.workspace }}"
+  maven-parameters:
+    description: "Any parameters of the Maven command."
+    default: ""
+  env:
+    description: "Any environment-specifics that are meant to be available in 
the context of the call."
+    default: ""
+runs:
+  using: "composite"
+  steps:
+    - name: "Runs Maven Command"
+      working-directory: "${{ inputs.working_directory }}"
+      shell: bash
+      run: |
+        # errexit needs to be disabled explicitly here because maven-utils.sh 
handles the error if a mirror is not available
+        set +o errexit
+        
+        ${{ inputs.env }} source "./tools/ci/maven-utils.sh"
+        run_mvn ${{ inputs.maven-parameters }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000000..4c99011c7e7
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This workflow is meant as a basic CI run covering the most important features
+# and the default Java version. It is meant to run before a pull request is merged.
+
+name: "Flink CI (beta)"
+
+on:
+  push:
+  workflow_dispatch:
+
+permissions: read-all
+
+jobs:
+  pre-compile-checks:
+    name: "Pre-compile Checks"
+    uses: ./.github/workflows/template.pre-compile-checks.yml
+  ci:
+    name: "Default (Java 8)"
+    uses: ./.github/workflows/template.flink-ci.yml
+    with:
+      environment: 'PROFILE="-Dinclude_hadoop_aws"'
+      jdk_version: 8
+    secrets:
+      s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
+      s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
+      s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
diff --git a/.github/workflows/template.flink-ci.yml 
b/.github/workflows/template.flink-ci.yml
new file mode 100644
index 00000000000..ee7629e5acd
--- /dev/null
+++ b/.github/workflows/template.flink-ci.yml
@@ -0,0 +1,386 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Workflow template for triggering the Flink's test suite.
+
+name: "Apache Flink Test Workflow Template"
+
+on:
+  workflow_call:
+    inputs:
+      workflow-caller-id:
+        description: "The calling job's ID that can be used for build artifact 
names (that need to be different between different jobs of the same workflow)."
+        default: ""
+        type: string
+      environment:
+        description: "Defines environment variables for downstream scripts."
+        required: true
+        type: string
+      jdk_version:
+        description: "The Java version to use."
+        default: 8
+        type: number
+    secrets:
+      s3_bucket:
+        required: false
+      s3_access_key:
+        required: false
+      s3_secret_key:
+        required: false
+
+permissions: read-all
+
+# Running logic within a container comes with challenges around file 
permissions (e.g. when trying
+# to generate the hash for a certain set of files; see 
https://github.com/actions/cache/issues/753):
+#
+# The code is cloned into the runner's workspace /home/runner/work/ but 
outside the container.
+# The runner's workspace folder is then mounted to /__w within the container. 
Files that were generated
+# as part of the checkout will have the runner's user as the owner. Any files 
that are generated during
+# the workflow execution will have the container's user as the owner (i.e. 
root). That can cause issues
+# with actions that need to access the files of the checkout.
+#
+# We can work around this issue by copying the source code to a separate 
location (i.e. the container
+# user's home folder). It's important to notice that any file that is subject 
to caching should still
+# live in the mounted folder to ensure accessibility by the cache action.
+env:
+  MOUNTED_WORKING_DIR: /__w/flink/flink
+  CONTAINER_LOCAL_WORKING_DIR: /root/flink
+  # The following two variables are used by the utility scripts for creating
+  # and unpacking the build artifacts.
+  FLINK_ARTIFACT_DIR: /root/artifact-directory
+  FLINK_ARTIFACT_FILENAME: flink_artifacts.tar.gz
+
+  MAVEN_REPO_FOLDER: /root/.m2/repository
+  MAVEN_ARGS: -Dmaven.repo.local=/root/.m2/repository
+  # required by tools/azure-pipelines/cache_docker_images.sh
+  DOCKER_IMAGES_CACHE_FOLDER: /root/.docker-cache
+
+jobs:
+  compile:
+    name: "Compile"
+    runs-on: ubuntu-22.04
+    container:
+      image: mapohl/flink-ci:FLINK-34194
+      # --init makes the process in the container be started as an init process, which will clean up any daemon processes during shutdown
+      # --privileged allows writing coredumps in docker (FLINK-16973)
+      options: --init --privileged
+    env:
+      # timeout in minutes - this environment variable is required by 
uploading_watchdog.sh
+      GHA_JOB_TIMEOUT: 240
+    outputs:
+      stringified-workflow-name: ${{ 
steps.workflow-prep-step.outputs.stringified-workflow-name }}
+    steps:
+      - name: "Flink Checkout"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: "Initialize job"
+        uses: "./.github/actions/job_init"
+        with:
+          jdk_version: ${{ inputs.jdk_version }}
+          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
+          source_directory: ${{ env.MOUNTED_WORKING_DIR }}
+          target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+
+      - name: "Stringify workflow name"
+        id: workflow-prep-step
+        run: |
+          # adds a stringified version of the workflow name that can be used 
for generating unique build artifact names within a composite workflow
+          # - replaces any special characters (except for underscores and 
dots) with dashes
+          # - makes the entire string lowercase
+          # - condenses multiple dashes into a single one
+          # - removes leading and following dashes
+          stringified_workflow_name=$(echo "${{ github.workflow }}-${{ 
inputs.workflow-caller-id }}" | tr -C '[:alnum:]._' '-' |  tr '[:upper:]' 
'[:lower:]' | sed -e 's/--*/-/g' -e 's/^-*//g' -e 's/-*$//g')
+          echo "stringified-workflow-name=${stringified_workflow_name}" >> 
$GITHUB_OUTPUT
+
+      - name: "Compile Flink"
+        uses: "./.github/actions/run_mvn"
+        timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
+        with:
+          working_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+          maven-parameters: "test-compile -Dflink.markBundledAsOptional=false 
-Dfast"
+          env: "${{ inputs.environment }}"
+
+      - name: "Collect build artifacts"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: |
+          ./tools/azure-pipelines/create_build_artifact.sh
+
+      - name: "Upload artifacts to make them available in downstream jobs"
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-artifacts-${{ 
steps.workflow-prep-step.outputs.stringified-workflow-name }}-${{ 
github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}/${{ env.FLINK_ARTIFACT_FILENAME 
}}
+          if-no-files-found: error
+          # use minimum here because we only need these artifacts to speed up 
the build
+          retention-days: 1
+
+  packaging:
+    name: "Test packaging/licensing"
+    needs: compile
+    runs-on: ubuntu-22.04
+    container:
+      image: mapohl/flink-ci:FLINK-34194
+      # --init makes the process in the container be started as an init process, which will clean up any daemon processes during shutdown
+      # --privileged allows writing coredumps in docker (FLINK-16973)
+      options: --init --privileged
+
+    steps:
+      - name: "Flink Checkout"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: "Initialize job"
+        uses: "./.github/actions/job_init"
+        with:
+          jdk_version: ${{ inputs.jdk_version }}
+          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
+          source_directory: ${{ env.MOUNTED_WORKING_DIR }}
+          target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+
+      - name: "Download build artifacts from compile job"
+        uses: actions/download-artifact@v4
+        with:
+          name: build-artifacts-${{ 
needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}
+
+      - name: "Unpack build artifact"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: |
+          ./tools/azure-pipelines/unpack_build_artifact.sh
+
+      - name: "Test"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: |
+          ${{ inputs.environment }} ./tools/ci/compile_ci.sh || exit $?
+
+  test:
+    name: "Test (module: ${{ matrix.module }})"
+    needs: compile
+    runs-on: ubuntu-22.04
+    container:
+      image: mapohl/flink-ci:FLINK-34194
+      # --init makes the process in the container be started as an init process, which will clean up any daemon processes during shutdown
+      # --privileged allows writing coredumps in docker (FLINK-16973)
+      options: --init --privileged
+    env:
+      # timeout in minutes - this environment variable is required by 
uploading_watchdog.sh
+      GHA_JOB_TIMEOUT: 240
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - module: core
+            stringified-module-name: core
+          - module: python
+            stringified-module-name: python
+          - module: table
+            stringified-module-name: table
+          - module: connect
+            stringified-module-name: connect
+          - module: tests
+            stringified-module-name: tests
+          - module: misc
+            stringified-module-name: misc
+
+    steps:
+      - name: "Flink Checkout"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: "Initialize job"
+        uses: "./.github/actions/job_init"
+        with:
+          jdk_version: ${{ inputs.jdk_version }}
+          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
+          source_directory: ${{ env.MOUNTED_WORKING_DIR }}
+          target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+
+      - name: "Set coredump pattern"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: sudo sysctl -w kernel.core_pattern=core.%p
+
+      - name: "Download build artifacts from compile job"
+        uses: actions/download-artifact@v4
+        with:
+          name: build-artifacts-${{ 
needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}
+
+      - name: "Unpack build artifact"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: |
+          ./tools/azure-pipelines/unpack_build_artifact.sh
+
+      - name: "Try loading Docker images from Cache"
+        id: docker-cache
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
+          key: ${{ matrix.module }}-docker-${{ runner.os }}-${{ 
hashFiles('**/cache_docker_images.sh', 
'**/flink-test-utils-parent/**/DockerImageVersions.java') }}
+          restore-keys: ${{ matrix.module }}-docker-${{ runner.os }}
+
+      - name: "Load Docker images if not present in cache, yet"
+        if: ${{ !cancelled() && steps.docker-cache.outputs.cache-hit != 'true' }}
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh load
+
+      - name: "Test - ${{ matrix.module }}"
+        id: test-run
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        env:
+          IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
+          IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
+          IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
+        timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
+        run: |
+          ${{ inputs.environment }} PROFILE="$PROFILE -Pgithub-actions" 
./tools/azure-pipelines/uploading_watchdog.sh \
+              ./tools/ci/test_controller.sh ${{ matrix.module }}
+
+      - name: "Post-process build artifacts"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        run: find ${{ steps.test-run.outputs.debug-files-output-dir }} -type f 
-exec rename 's/[:<>|*?]/-/' {} \;
+
+      - name: "Upload build artifacts"
+        uses: actions/upload-artifact@v4
+        if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != '' }}
+        with:
+          name: logs-test-${{ needs.compile.outputs.stringified-workflow-name 
}}-${{ github.run_number }}-${{ matrix.stringified-module-name }}-${{ 
steps.test-run.outputs.debug-files-name }}
+          path: ${{ steps.test-run.outputs.debug-files-output-dir }}
+
+      - name: "Save Docker images to cache"
+        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
+        if: ${{ !cancelled() && (failure() || steps.docker-cache.outputs.cache-hit != 'true') }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh save
+
+  e2e:
+    name: "E2E (group ${{ matrix.group }})"
+    needs: compile
+    # the end to end tests are not executed in Flink's CI Docker container due 
to problems when running Docker-in-Docker
+    runs-on: ubuntu-22.04
+    timeout-minutes: 310
+    env:
+      # timeout in minutes - this environment variable is required by 
uploading_watchdog.sh
+      GHA_JOB_TIMEOUT: 310
+
+      # required and forwarded as the cache-dir system property to 
DownloadCacheFactory
+      # implementations in flink-end-to-end-tests/flink-end-to-end-tests-common
+      E2E_CACHE_FOLDER: ${{ github.workspace }}/.e2e-cache
+      # required by common_artifact_download_cacher.sh in 
flink-end-to-end-tests/test-scripts
+      E2E_TARBALL_CACHE: ${{ github.workspace }}/.e2e-tar-cache
+
+      # The following environment variables need to be overwritten here 
because the e2e tests do not
+      # run in containers.
+      MAVEN_REPO_FOLDER: ${{ github.workspace }}/.m2/repository
+      MAVEN_ARGS: -Dmaven.repo.local=${{ github.workspace }}/.m2/repository
+      FLINK_ARTIFACT_DIR: ${{ github.workspace }}
+      DOCKER_IMAGES_CACHE_FOLDER: ${{ github.workspace }}/.docker-cache
+    strategy:
+      fail-fast: false
+      matrix:
+        group: [1, 2]
+
+    steps:
+      - name: "Flink Checkout"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: "Initialize job"
+        uses: "./.github/actions/job_init"
+        with:
+          jdk_version: ${{ inputs.jdk_version }}
+          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
+
+      - name: "Install missing packages"
+        run: sudo apt-get install -y net-tools docker-compose zip
+
+      # netty-tcnative requires OpenSSL v1.0.0
+      - name: "Install OpenSSL"
+        run: |
+          sudo apt-get install -y bc libapr1
+          wget -r --no-parent -nd --accept=libssl1.0.0_*ubuntu5.*_amd64.deb 
http://security.ubuntu.com/ubuntu/pool/main/o/openssl1.0/
+          sudo apt install ./libssl1.0.0_*.deb
+
+      - name: "Download build artifacts from compile job"
+        uses: actions/download-artifact@v4
+        with:
+          name: build-artifacts-${{ 
needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}
+
+      - name: "Unpack build artifact"
+        run: ./tools/azure-pipelines/unpack_build_artifact.sh
+
+      # the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the tests haven't created it
+      # this may for example happen if a given directory is only used by a 
subset of tests, which are run in a different 'group'
+      - name: "Create cache directories"
+        run: |
+          mkdir -p ${{ env.E2E_CACHE_FOLDER }}
+          mkdir -p ${{ env.E2E_TARBALL_CACHE }}
+          mkdir -p ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
+
+      - name: "Load E2E files from Cache"
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.E2E_CACHE_FOLDER }}
+          key: e2e-cache-${{ matrix.group }}-${{ 
hashFiles('**/flink-end-to-end-tests/**/*.java', '!**/avro/**') }}
+
+      - name: "Load E2E artifacts from Cache"
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.E2E_TARBALL_CACHE }}
+          key: e2e-artifact-cache-${{ matrix.group }}-${{ 
hashFiles('**/flink-end-to-end-tests/**/*.sh') }}
+          restore-keys: e2e-artifact-cache-${{ matrix.group }}
+
+      - name: "Try loading Docker images from Cache"
+        id: docker-cache
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
+          key: e2e-${{ matrix.group }}-docker-${{ runner.os }}-${{ 
hashFiles('**/cache_docker_images.sh', 
'**/flink-test-utils-parent/**/DockerImageVersions.java') }}
+
+      - name: "Load Docker images if not present in Cache, yet"
+        if: ${{ !cancelled() && steps.docker-cache.outputs.cache-hit != 'true' }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh load
+
+      - name: "Build Flink"
+        uses: "./.github/actions/run_mvn"
+        with:
+          maven-parameters: "install -DskipTests -Dfast $PROFILE 
-Pskip-webui-build"
+          env: "${{ inputs.environment }}"
+
+      - name: "Run E2E Tests"
+        id: test-run
+        env:
+          IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
+          IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
+          IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
+        timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
+        run: |
+          ${{ inputs.environment }} FLINK_DIR=`pwd`/build-target 
PROFILE="$PROFILE -Pgithub-actions" 
./tools/azure-pipelines/uploading_watchdog.sh \
+            flink-end-to-end-tests/run-nightly-tests.sh ${{ matrix.group }}
+
+      - name: "Upload Logs"
+        uses: actions/upload-artifact@v4
+        if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != 
'' }}
+        with:
+          name: logs-e2e-${{ needs.compile.outputs.stringified-workflow-name 
}}-${{ github.run_number }}-${{ matrix.group }}-${{ 
steps.test-run.outputs.debug-files-name }}
+          path: ${{ steps.test-run.outputs.debug-files-output-dir }}
+
+      - name: "Save Docker images to Cache"
+        if: ${{ !cancelled() && (failure() || steps.docker-cache.outputs.cache-hit != 'true') }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh save
diff --git a/.github/workflows/template.pre-compile-checks.yml 
b/.github/workflows/template.pre-compile-checks.yml
new file mode 100644
index 00000000000..94fcafb26fd
--- /dev/null
+++ b/.github/workflows/template.pre-compile-checks.yml
@@ -0,0 +1,77 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This workflow collects all checks that do not require compilation and are, 
therefore,
+# JDK independent.
+
+name: "Pre-compile Checks"
+
+on:
+  workflow_dispatch:
+    inputs:
+      jdk_version:
+        description: "The JDK version that shall be used as a default within 
the Flink CI Docker container."
+        default: "8"
+        type: choice
+        options: ["8", "11", "17", "21"]
+
+  workflow_call:
+    inputs:
+      jdk_version:
+        description: "The JDK version that shall be used as a default within 
the Flink CI Docker container."
+        default: 8
+        type: number
+
+permissions: read-all
+
+# This workflow should only contain steps that do not require the compilation 
of Flink (and therefore, are
+# independent of the used JDK)
+jobs:
+  qa:
+    name: "Basic QA"
+    runs-on: ubuntu-22.04
+    container:
+      image: mapohl/flink-ci:FLINK-34194
+      # --init makes the process in the container be started as an init process, which will clean up any daemon processes during shutdown
+      # --privileged allows writing coredumps in docker (FLINK-16973)
+      options: --init --privileged
+
+    steps:
+      - name: "Flink Checkout"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: "Initialize job"
+        uses: "./.github/actions/job_init"
+        with:
+          jdk_version: ${{ inputs.jdk_version }}
+
+      - name: "Checkstyle"
+        uses: "./.github/actions/run_mvn"
+        with:
+          maven-parameters: "checkstyle:check -T1C"
+
+      - name: "Spotless"
+        if: (success() || failure())
+        uses: "./.github/actions/run_mvn"
+        with:
+          maven-parameters: "spotless:check -T1C"
+
+      - name: "License Headers"
+        if: (success() || failure())
+        uses: "./.github/actions/run_mvn"
+        with:
+          maven-parameters: "org.apache.rat:apache-rat-plugin:check -N"
diff --git a/tools/azure-pipelines/create_build_artifact.sh 
b/tools/azure-pipelines/create_build_artifact.sh
index 0beeca3b075..925a4a214a6 100755
--- a/tools/azure-pipelines/create_build_artifact.sh
+++ b/tools/azure-pipelines/create_build_artifact.sh
@@ -17,7 +17,7 @@
 # limitations under the License.
 
################################################################################
 
-echo "Creating build artifact dir $FLINK_ARTIFACT_DIR"
+echo "Copying build artifacts to directory $FLINK_ARTIFACT_DIR"
 
 cp -r . "$FLINK_ARTIFACT_DIR"
 
@@ -36,7 +36,7 @@ find "$FLINK_ARTIFACT_DIR" -maxdepth 8 -type f -name '*.jar' 
-exec rm -rf {} \;
 rm -rf "$FLINK_ARTIFACT_DIR/.git"
 
 # AZ Pipelines has a problem with links.
-rm "$FLINK_ARTIFACT_DIR/build-target"
+rm -f "$FLINK_ARTIFACT_DIR/build-target"
 
 # Remove javadocs because they are not used in later stages
 rm -rf "$FLINK_ARTIFACT_DIR/target/site"
@@ -45,3 +45,9 @@ rm -rf "$FLINK_ARTIFACT_DIR/target/site"
 rm -rf "$FLINK_ARTIFACT_DIR/flink-runtime-web/web-dashboard/node"
 rm -rf "$FLINK_ARTIFACT_DIR/flink-runtime-web/web-dashboard/node_modules"
 
+if [ -n "${FLINK_ARTIFACT_FILENAME}" ]; then
+  # GitHub Actions doesn't create an archive automatically - packaging the 
files improves the performance of artifact uploads
+  echo "Archives artifacts into 
${FLINK_ARTIFACT_DIR}/${FLINK_ARTIFACT_FILENAME}"
+  tar --create --gzip --exclude 
"${FLINK_ARTIFACT_DIR}/${FLINK_ARTIFACT_FILENAME}" --file 
"${FLINK_ARTIFACT_FILENAME}" -C "${FLINK_ARTIFACT_DIR}" .
+  mv "${FLINK_ARTIFACT_FILENAME}" "${FLINK_ARTIFACT_DIR}"
+fi
diff --git a/tools/azure-pipelines/debug_files_utils.sh 
b/tools/azure-pipelines/debug_files_utils.sh
index c2b3c3d5e8e..9deed0b97db 100755
--- a/tools/azure-pipelines/debug_files_utils.sh
+++ b/tools/azure-pipelines/debug_files_utils.sh
@@ -55,3 +55,21 @@ function prepare_debug_files {
 
        mkdir -p $DEBUG_FILES_OUTPUT_DIR || { echo "FAILURE: cannot create 
debug files directory '${DEBUG_FILES_OUTPUT_DIR}'." ; exit 1; }
 }
+
+function unset_debug_artifacts_if_empty {
+       if [ -z "${DEBUG_FILES_OUTPUT_DIR+x}" ]; then
+               echo "[ERROR] No environment variable DEBUG_FILES_OUTPUT_DIR 
was set."
+               exit 1
+       elif [ "$(ls -A ${DEBUG_FILES_OUTPUT_DIR} | wc -l)" -eq 0 ]; then
+               echo "[INFO] Unsetting environment variable 
DEBUG_FILES_OUTPUT_DIR because there were no artifacts produced."
+
+               if [ -n "${TF_BUILD+x}" ]; then
+                       echo "##vso[task.setvariable 
variable=DEBUG_FILES_OUTPUT_DIR]"
+               elif [ -n "${GITHUB_ACTIONS+x}" ]; then
+                       echo "debug-files-output-dir=" >> "$GITHUB_OUTPUT"
+               else
+                       echo "[ERROR] No CI environment detected. Debug 
artifact-related variable won't be unset."
+                       exit 1
+               fi
+       fi
+}
diff --git a/tools/azure-pipelines/unpack_build_artifact.sh 
b/tools/azure-pipelines/unpack_build_artifact.sh
index 68f8d221aae..60a65bde340 100755
--- a/tools/azure-pipelines/unpack_build_artifact.sh
+++ b/tools/azure-pipelines/unpack_build_artifact.sh
@@ -24,7 +24,14 @@ if ! [ -e $FLINK_ARTIFACT_DIR ]; then
 fi
 
 echo "Merging cache"
-cp -RT "$FLINK_ARTIFACT_DIR" "."
+if [ -z "${FLINK_ARTIFACT_FILENAME}" ]; then
+  # for Azure Pipelines
+  cp -RT "$FLINK_ARTIFACT_DIR" "."
+else
+  # for GitHub Actions
+  echo "Extract build artifacts 
${FLINK_ARTIFACT_DIR}/${FLINK_ARTIFACT_FILENAME} into local directory."
+  tar -xzf "${FLINK_ARTIFACT_DIR}/${FLINK_ARTIFACT_FILENAME}"
+fi
 
 echo "Adjusting timestamps"
 # adjust timestamps of proto file to avoid re-generation
diff --git a/tools/azure-pipelines/uploading_watchdog.sh 
b/tools/azure-pipelines/uploading_watchdog.sh
index dc3d227ecbe..5b5963782f4 100755
--- a/tools/azure-pipelines/uploading_watchdog.sh
+++ b/tools/azure-pipelines/uploading_watchdog.sh
@@ -110,6 +110,8 @@ if [[ "$TEST_EXIT_CODE" == 0 ]]; then
   kill $WATCHDOG_PID
   rm $DEBUG_FILES_OUTPUT_DIR/watchdog
   rm -f $DEBUG_FILES_OUTPUT_DIR/jps-traces.*
+
+  unset_debug_artifacts_if_empty
 fi
 
 # properly forward exit code

Reply via email to