This is an automated email from the ASF dual-hosted git repository.

leonard pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-cdc.git


The following commit(s) were added to refs/heads/master by this push:
     new f7fa31758 [FLINK-34187][cdc][build] Setup Github CI for Flink CDC project
f7fa31758 is described below

commit f7fa3175809b4b912b19ad91c89599bf1c2b9ea3
Author: gongzhongqiang <gongzhongqi...@apache.org>
AuthorDate: Tue Mar 5 13:58:36 2024 +0800

    [FLINK-34187][cdc][build] Setup Github CI for Flink CDC project
    
    This closes #3022.
---
 .github/workflows/flink_cdc.yml                | 217 +++++++++++++++++++++++++
 azure-pipelines.yml                            |  47 ------
 tools/azure-pipelines/create_build_artifact.sh |  35 ----
 tools/azure-pipelines/debug_files_utils.sh     |  25 ---
 tools/azure-pipelines/jobs-template.yml        | 197 ----------------------
 tools/azure-pipelines/unpack_build_artifact.sh |  31 ----
 tools/azure-pipelines/uploading_watchdog.sh    |  85 ----------
 tools/ci/compile.sh                            |  63 -------
 tools/ci/controller_utils.sh                   |  50 ------
 tools/ci/google-mirror-settings.xml            |  26 ---
 tools/ci/log4j.properties                      |  32 ----
 tools/ci/maven-utils.sh                        | 100 ------------
 tools/ci/stage.sh                              | 198 ----------------------
 tools/ci/test_controller.sh                    | 113 -------------
 tools/ci/watchdog.sh                           | 127 ---------------
 15 files changed, 217 insertions(+), 1129 deletions(-)

diff --git a/.github/workflows/flink_cdc.yml b/.github/workflows/flink_cdc.yml
new file mode 100644
index 000000000..9c977ebab
--- /dev/null
+++ b/.github/workflows/flink_cdc.yml
@@ -0,0 +1,217 @@
+#
+# Copyright 2023 Ververica Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: Flink CDC CI
+on:
+  push:
+    branches:
+      - master
+      - release-*
+  pull_request:
+    branches:
+      - master
+      - release-*
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+env:
+  MODULES_CORE: "\
+  flink-cdc-cli,\
+  flink-cdc-common,\
+  flink-cdc-composer,\
+  flink-cdc-runtime"
+  
+  MODULES_PIPELINE_CONNECTORS: "\
+  flink-cdc-connect/flink-cdc-pipeline-connectors"
+  
+  MODULES_MYSQL: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mysql-cdc"
+  
+  MODULES_POSTGRES: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-postgres-cdc"
+  
+  MODULES_ORACLE: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oracle-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oracle-cdc"
+  
+  MODULES_MONGODB: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mongodb-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mongodb-cdc"
+  
+  MODULES_SQLSERVER: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-sqlserver-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-sqlserver-cdc"
+  
+  MODULES_TIDB: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-tidb-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-tidb-cdc"
+  
+  MODULES_OCEANBASE: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oceanbase-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oceanbase-cdc"
+  
+  MODULES_DB2: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-db2-cdc"
+  
+  MODULES_VITESS: "\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-connector-vitess-cdc,\
+  flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-vitess-cdc"
+  
+  MODULES_E2E: "\
+  flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests,\
+  flink-cdc-e2e-tests/flink-cdc-source-e2e-tests"
+
+jobs:
+  compile_and_test:
+    # Only run the CI pipeline for the flink-cdc-connectors repository
+#    if: github.repository == 'apache/flink-cdc-connectors'
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        jdk: [ 8 ]
+        module: [ "core",
+                  "pipeline_connectors",
+                  "mysql",
+                  "postgres",
+                  "oracle",
+                  "mongodb",
+                  "sqlserver",
+                  "tidb",
+                  "oceanbase",
+                  "db2",
+                  "vitess",
+                  "e2e"
+        ]
+    timeout-minutes: 120
+    env:
+      MVN_COMMON_OPTIONS: -Dmaven.wagon.http.pool=false \
+        -Dorg.slf4j.simpleLogger.showDateTime=true \
+        -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss.SSS \
+        -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
+        --no-snapshot-updates -B \
+        --settings /home/vsts/work/1/s/tools/ci/google-mirror-settings.xml \
+        -Dfast -Dlog.dir=/home/vsts/work/_temp/debug_files \
+        -Dlog4j.configurationFile=file:///home/vsts/work/1/s/tools/ci/log4j.properties
+    steps:
+      - run: echo "Running CI pipeline for JDK version ${{ matrix.jdk }}"
+
+      - name: Check out repository code
+        uses: actions/checkout@v4
+        with:
+          submodules: true
+
+      - name: Set JDK
+        uses: actions/setup-java@v4
+        with:
+          java-version: ${{ matrix.jdk }}
+          distribution: 'temurin'
+          cache: 'maven'
+
+      - name: Set Maven 3.8.6
+        uses: stCarolas/setup-maven@v4.5
+        with:
+          maven-version: 3.8.6
+
+      - name: Compile and test ${{ matrix.module }}
+        timeout-minutes: 90
+        run: |
+          set -o pipefail
+          
+          case ${{ matrix.module }} in
+              ("core")
+               modules=${{ env.MODULES_CORE }}
+              ;;
+              ("pipeline_connectors")
+               modules=${{ env.MODULES_PIPELINE_CONNECTORS }}
+              ;;
+              ("mysql")
+               modules=${{ env.MODULES_MYSQL }}
+              ;;
+              ("postgres")
+               modules=${{ env.MODULES_POSTGRES }}
+              ;;
+              ("oracle")
+               modules=${{ env.MODULES_ORACLE }}
+              ;;
+              ("mongodb")
+               modules=${{ env.MODULES_MONGODB }}
+              ;;
+              ("sqlserver")
+               modules=${{ env.MODULES_SQLSERVER }}
+              ;;
+              ("tidb")
+               modules=${{ env.MODULES_TIDB }}
+              ;;
+              ("oceanbase")
+               modules=${{ env.MODULES_OCEANBASE }}
+              ;;
+              ("db2")
+               modules=${{ env.MODULES_DB2 }}
+              ;;
+              ("vitess")
+               modules=${{ env.MODULES_VITESS }}
+              ;;
+              ("e2e")
+               compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_E2E }}"
+               modules=${{ env.MODULES_E2E }}
+              ;;
+          esac
+
+          if [ ${{ matrix.module }} != "e2e" ]; then
+            compile_modules=$modules
+          fi
+
+          mvn --no-snapshot-updates -B -DskipTests -pl $compile_modules -am install && mvn --no-snapshot-updates -B -pl $modules verify
+
+      - name: Print JVM thread dumps when cancelled
+        if: ${{ failure() }}
+        run: |
+          # ----------------------------------------------------------------------------
+          # Copyright 2023 The Netty Project
+          #
+          # ----------------------------------------------------------------------------
+          # Source: https://github.com/netty/netty/blob/main/.github/actions/thread-dump-jvms/action.yml
+          echo "$OSTYPE"
+          if [[ "$OSTYPE" == "linux-gnu"* ]] && command -v sudo &> /dev/null; then
+            echo "Setting up JVM thread dumps"
+            # use jattach so that Java processes in docker containers are also covered
+            # download jattach
+            curl -s -L -o /tmp/jattach https://github.com/apangin/jattach/releases/download/v2.1/jattach
+            if command -v sha256sum &> /dev/null; then
+              # verify hash of jattach binary
+              sha256sum -c <(echo "07885fdc782e02e7302c6d190f54c3930afa10a38140365adf54076ec1086a8e  /tmp/jattach") || exit 1
+            fi
+            chmod +x /tmp/jattach
+            for java_pid in $(sudo pgrep java); do
+              echo "----------------------- pid $java_pid -----------------------"
+              echo "command line: $(sudo cat /proc/$java_pid/cmdline | xargs -0 echo)"
+              sudo /tmp/jattach $java_pid jcmd VM.command_line || true
+              sudo /tmp/jattach $java_pid jcmd "Thread.print -l"
+              sudo /tmp/jattach $java_pid jcmd GC.heap_info || true
+            done
+          else
+            for java_pid in $(jps -q -J-XX:+PerfDisableSharedMem); do
+              echo "----------------------- pid $java_pid -----------------------"
+              jcmd $java_pid VM.command_line || true
+              jcmd $java_pid Thread.print -l
+              jcmd $java_pid GC.heap_info || true
+            done
+          fi
+          exit 0
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index 4785ce649..000000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2023 Ververica Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Maven
-# Build your Java project and run tests with Apache Maven.
-# Add steps that analyze code, save build artifacts, deploy, and more:
-# https://docs.microsoft.com/azure/devops/pipelines/languages/java
-
-trigger:
-  branches:
-    include:
-      - '*'  # must quote since "*" is a YAML reserved character; we want a 
string
-# Define variables:
-# - See tools/azure-pipelines/jobs-template.yml for a short summary of the 
caching
-# - See 
https://stackoverflow.com/questions/60742105/how-can-i-access-a-secret-value-from-an-azure-pipelines-expression
-#   to understand why the secrets are handled like this
-variables:
-  MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
-  E2E_CACHE_FOLDER: $(Pipeline.Workspace)/e2e_cache
-  E2E_TARBALL_CACHE: $(Pipeline.Workspace)/e2e_artifact_cache
-  MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
-  CACHE_KEY: maven | $(Agent.OS) | **/pom.xml, !**/target/**
-  CACHE_FALLBACK_KEY: maven | $(Agent.OS)
-  FLINK_ARTIFACT_DIR: $(Pipeline.Workspace)/flink_artifact
-
-stages:
-  # CI / PR triggered stage:
-  - stage: ci
-    displayName: "CI build (custom builders)"
-    jobs:
-      - template: tools/azure-pipelines/jobs-template.yml
-        parameters: # see template file for a definition of the parameters.
-          stage_name: ci_build
-          test_pool_definition:
-            vmImage: 'ubuntu-20.04'
-          run_end_to_end: false
-          jdk: 8
diff --git a/tools/azure-pipelines/create_build_artifact.sh 
b/tools/azure-pipelines/create_build_artifact.sh
deleted file mode 100755
index ab34b50b3..000000000
--- a/tools/azure-pipelines/create_build_artifact.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-echo "Creating build artifact dir $FLINK_ARTIFACT_DIR"
-
-cp -r . "$FLINK_ARTIFACT_DIR"
-
-echo "Minimizing artifact files"
-
-# reduces the size of the artifact directory to speed up
-# the packing&upload / download&unpacking process
-# by removing files not required for subsequent stages
-
-# jars are re-built in subsequent stages, so no need to cache them (cannot be 
avoided)
-find "$FLINK_ARTIFACT_DIR" -maxdepth 8 -type f -name '*.jar' | xargs rm -rf
-
-# .git directory
-# not deleting this can cause build stability issues
-# merging the cached version sometimes fails
-rm -rf "$FLINK_ARTIFACT_DIR/.git"
-
diff --git a/tools/azure-pipelines/debug_files_utils.sh 
b/tools/azure-pipelines/debug_files_utils.sh
deleted file mode 100755
index 232c41afa..000000000
--- a/tools/azure-pipelines/debug_files_utils.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-function prepare_debug_files {
-       MODULE=$@
-       export DEBUG_FILES_OUTPUT_DIR="$AGENT_TEMPDIRECTORY/debug_files"
-       export DEBUG_FILES_NAME="$(echo $MODULE | tr -c '[:alnum:]\n\r' 
'_')-$(date +%s)"
-       echo "##vso[task.setvariable 
variable=DEBUG_FILES_OUTPUT_DIR]$DEBUG_FILES_OUTPUT_DIR"
-       echo "##vso[task.setvariable 
variable=DEBUG_FILES_NAME]$DEBUG_FILES_NAME"
-       mkdir -p $DEBUG_FILES_OUTPUT_DIR || { echo "FAILURE: cannot create 
debug files directory '${DEBUG_FILES_OUTPUT_DIR}'." ; exit 1; }
-}
diff --git a/tools/azure-pipelines/jobs-template.yml 
b/tools/azure-pipelines/jobs-template.yml
deleted file mode 100644
index 386862279..000000000
--- a/tools/azure-pipelines/jobs-template.yml
+++ /dev/null
@@ -1,197 +0,0 @@
-# Copyright 2023 Ververica Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-parameters:
-  test_pool_definition: # defines the hardware pool for compilation and unit 
test execution.
-  stage_name: # defines a unique identifier for all jobs in a stage (in case 
the jobs are added multiple times to a stage)
-  run_end_to_end: # if set to 'true', the end to end tests will be executed
-  jdk: # the jdk version to use
-
-jobs:
-  - job: compile_${{parameters.stage_name}}
-    # succeeded() is needed to allow job cancellation
-    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
-    pool: ${{parameters.test_pool_definition}}
-    timeoutInMinutes: 40
-    cancelTimeoutInMinutes: 1
-    workspace:
-      clean: all # this cleans the entire workspace directory before running a 
new job
-      # It is necessary because the custom build machines are reused for tests.
-      # See also 
https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?view=azure-devops&tabs=yaml#workspace
-
-    steps:
-      # The cache task is persisting the .m2 directory between builds, so that
-      # we do not have to re-download all dependencies from maven central for
-      # each build. The hope is that downloading the cache is faster than
-      # all dependencies individually.
-      # In this configuration, we use a hash over all committed (not 
generated) .pom files
-      # as a key for the build cache (CACHE_KEY). If we have a cache miss on 
the hash
-      # (usually because a pom file has changed), we'll fall back to a key 
without
-      # the pom files (CACHE_FALLBACK_KEY).
-      # Offical documentation of the Cache task: 
https://docs.microsoft.com/en-us/azure/devops/pipelines/caching/?view=azure-devops
-      - task: Cache@2
-        inputs:
-          key: $(CACHE_KEY)
-          restoreKeys: $(CACHE_FALLBACK_KEY)
-          path: $(MAVEN_CACHE_FOLDER)
-        continueOnError: true # continue the build even if the cache fails.
-        displayName: Cache Maven local repo
-      - script: |
-          echo "##vso[task.setvariable 
variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
-          echo "##vso[task.setvariable 
variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
-        displayName: "Set JDK"
-      # Compile
-      - script: |
-          ./tools/ci/compile.sh || exit $?
-          ./tools/azure-pipelines/create_build_artifact.sh
-        displayName: Compile
-
-      # upload artifacts for next stage
-      - task: PublishPipelineArtifact@1
-        inputs:
-          targetPath: $(FLINK_ARTIFACT_DIR)
-          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
-
-  - job: test_1_${{parameters.stage_name}}
-    dependsOn: compile_${{parameters.stage_name}}
-    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
-    pool: ${{parameters.test_pool_definition}}
-    timeoutInMinutes: 70
-    cancelTimeoutInMinutes: 1
-    workspace:
-      clean: all
-    strategy:
-      matrix:
-        core:
-          module: core
-        pipeline_connectors:
-          module: pipeline_connectors
-        postgres:
-          module: postgres
-        oracle:
-          module: oracle
-        mongodb:
-          module: mongodb
-        sqlserver:
-          module: sqlserver
-        tidb:
-          module: tidb
-        db2:
-          module: db2
-        vitess:
-          module: vitess
-        misc:
-          module: misc
-    steps:
-      # download artifact from compile stage
-      - task: DownloadPipelineArtifact@2
-        inputs:
-          path: $(FLINK_ARTIFACT_DIR)
-          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
-
-      - script: ./tools/azure-pipelines/unpack_build_artifact.sh
-        displayName: "Unpack Build artifact"
-
-      - task: Cache@2
-        inputs:
-          key: $(CACHE_KEY)
-          restoreKeys: $(CACHE_FALLBACK_KEY)
-          path: $(MAVEN_CACHE_FOLDER)
-        continueOnError: true # continue the build even if the cache fails.
-        condition: not(eq('${{parameters.test_pool_definition.name}}', 
'Default'))
-        displayName: Cache Maven local repo
-
-      - script: |
-          echo "##vso[task.setvariable 
variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
-          echo "##vso[task.setvariable 
variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
-        displayName: "Set JDK"
-
-      - script: sudo sysctl -w kernel.core_pattern=core.%p
-        displayName: Set coredump pattern
-
-      # Test
-      - script: ./tools/azure-pipelines/uploading_watchdog.sh 
./tools/ci/test_controller.sh $(module)
-        displayName: Test - $(module)
-
-      - task: PublishTestResults@2
-        condition: succeededOrFailed()
-        inputs:
-          testResultsFormat: 'JUnit'
-
-      # upload debug artifacts
-      - task: PublishPipelineArtifact@1
-        condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))
-        displayName: Upload Logs
-        inputs:
-          targetPath: $(DEBUG_FILES_OUTPUT_DIR)
-          artifact: logs-${{parameters.stage_name}}-$(DEBUG_FILES_NAME)
-
-  - job: test_2_${{parameters.stage_name}}
-    dependsOn: compile_${{parameters.stage_name}}
-    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
-    pool: ${{parameters.test_pool_definition}}
-    timeoutInMinutes: 90
-    cancelTimeoutInMinutes: 1
-    workspace:
-      clean: all
-    strategy:
-      matrix:
-        mysql:
-          module: mysql
-        oceanbase:
-          module: oceanbase
-        e2e:
-          module: e2e
-    steps:
-      # download artifact from compile stage
-      - task: DownloadPipelineArtifact@2
-        inputs:
-          path: $(FLINK_ARTIFACT_DIR)
-          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
-
-      - script: ./tools/azure-pipelines/unpack_build_artifact.sh
-        displayName: "Unpack Build artifact"
-
-      - task: Cache@2
-        inputs:
-          key: $(CACHE_KEY)
-          restoreKeys: $(CACHE_FALLBACK_KEY)
-          path: $(MAVEN_CACHE_FOLDER)
-        continueOnError: true # continue the build even if the cache fails.
-        condition: not(eq('${{parameters.test_pool_definition.name}}', 
'Default'))
-        displayName: Cache Maven local repo
-
-      - script: |
-          echo "##vso[task.setvariable 
variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
-          echo "##vso[task.setvariable 
variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
-        displayName: "Set JDK"
-
-      - script: sudo sysctl -w kernel.core_pattern=core.%p
-        displayName: Set coredump pattern
-
-      # Test
-      - script: ./tools/azure-pipelines/uploading_watchdog.sh 
./tools/ci/test_controller.sh $(module)
-        displayName: Test - $(module)
-
-      - task: PublishTestResults@2
-        condition: succeededOrFailed()
-        inputs:
-          testResultsFormat: 'JUnit'
-
-      # upload debug artifacts
-      - task: PublishPipelineArtifact@1
-        condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))
-        displayName: Upload Logs
-        inputs:
-          targetPath: $(DEBUG_FILES_OUTPUT_DIR)
-          artifact: logs-${{parameters.stage_name}}-$(DEBUG_FILES_NAME)
\ No newline at end of file
diff --git a/tools/azure-pipelines/unpack_build_artifact.sh 
b/tools/azure-pipelines/unpack_build_artifact.sh
deleted file mode 100755
index 367c07c97..000000000
--- a/tools/azure-pipelines/unpack_build_artifact.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-
-if ! [ -e $FLINK_ARTIFACT_DIR ]; then
-    echo "Cached flink dir $FLINK_ARTIFACT_DIR does not exist. Exiting build."
-    exit 1
-fi
-
-echo "Merging cache"
-cp -RT "$FLINK_ARTIFACT_DIR" "."
-
-echo "Adjusting timestamps"
-
-# adjust timestamps to prevent recompilation
-find . -type f -name '*.java' | xargs touch
-find . -type f -name '*.class' | xargs touch
diff --git a/tools/azure-pipelines/uploading_watchdog.sh 
b/tools/azure-pipelines/uploading_watchdog.sh
deleted file mode 100755
index 50156f391..000000000
--- a/tools/azure-pipelines/uploading_watchdog.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2023 Ververica Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This file has the following tasks
-# a) It reads the e2e timeout from the configuration file
-# b) It prints a warning if the test has reached 80% of it's execution time
-# c) N minutes before the end of the execution time, it will start uploading 
the current output as azure artifacts
-
-COMMAND=$@
-
-HERE="`dirname \"$0\"`"             # relative
-HERE="`( cd \"$HERE\" && pwd )`"    # absolutized and normalized
-if [ -z "$HERE" ] ; then
-  exit 1
-fi
-
-source "${HERE}/../ci/controller_utils.sh"
-
-source ./tools/azure-pipelines/debug_files_utils.sh
-prepare_debug_files "$AGENT_JOBNAME"
-export FLINK_LOG_DIR="$DEBUG_FILES_OUTPUT_DIR/flink-logs"
-mkdir $FLINK_LOG_DIR || { echo "FAILURE: cannot create log directory 
'${FLINK_LOG_DIR}'." ; exit 1; }
-sudo apt-get install -y moreutils
-
-REAL_START_SECONDS=$(date +"%s")
-REAL_END_SECONDS=$(date -d "$SYSTEM_PIPELINESTARTTIME + $SYSTEM_JOBTIMEOUT 
minutes" +"%s")
-REAL_TIMEOUT_SECONDS=$(($REAL_END_SECONDS - $REAL_START_SECONDS))
-KILL_SECONDS_BEFORE_TIMEOUT=$((2 * 60))
-
-echo "Running command '$COMMAND' with a timeout of $(($REAL_TIMEOUT_SECONDS / 
60)) minutes."
-
-MAIN_PID_FILE="/tmp/uploading_watchdog_main.pid"
-
-function timeout_watchdog() {
-  # 95%
-  sleep $(($REAL_TIMEOUT_SECONDS * 95 / 100))
-  echo 
"=========================================================================================="
-  echo "=== WARNING: This task took already 95% of the available time budget 
of $((REAL_TIMEOUT_SECONDS / 60)) minutes ==="
-  echo 
"=========================================================================================="
-  print_stacktraces | tee "$DEBUG_FILES_OUTPUT_DIR/jps-traces.0"
-
-  # final stack trace and kill processes 1 min before timeout
-  local secondsToKill=$(($REAL_END_SECONDS - $(date +"%s") - 
$KILL_SECONDS_BEFORE_TIMEOUT))
-  if [[ $secondsToKill -lt 0 ]]; then
-    secondsToKill=0
-  fi
-  sleep $(secondsToKill)
-  print_stacktraces | tee "$DEBUG_FILES_OUTPUT_DIR/jps-traces.1"
-
-  echo "============================="
-  echo "=== WARNING: Killing task ==="
-  echo "============================="
-  pkill -P $(<$MAIN_PID_FILE) # kill descendants
-  kill $(<$MAIN_PID_FILE)     # kill process itself
-
-  exit 42
-}
-
-timeout_watchdog &
-WATCHDOG_PID=$!
-
-# ts from moreutils prepends the time to each line
-( $COMMAND & PID=$! ; echo $PID >$MAIN_PID_FILE ; wait $PID ) | ts | tee 
$DEBUG_FILES_OUTPUT_DIR/watchdog
-TEST_EXIT_CODE=${PIPESTATUS[0]}
-
-# successful execution, cleanup watchdog related things
-if [[ "$TEST_EXIT_CODE" == 0 ]]; then
-  kill $WATCHDOG_PID
-  rm $DEBUG_FILES_OUTPUT_DIR/watchdog
-  rm -f $DEBUG_FILES_OUTPUT_DIR/jps-traces.*
-fi
-
-# properly forward exit code
-exit $TEST_EXIT_CODE
diff --git a/tools/ci/compile.sh b/tools/ci/compile.sh
deleted file mode 100755
index 0cb5b8e5d..000000000
--- a/tools/ci/compile.sh
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-#
-# This file contains tooling for compiling Flink
-#
-
-HERE="`dirname \"$0\"`"             # relative
-HERE="`( cd \"$HERE\" && pwd )`"    # absolutized and normalized
-if [ -z "$HERE" ] ; then
-    exit 1  # fail
-fi
-CI_DIR="$HERE/../ci"
-MVN_CLEAN_COMPILE_OUT="/tmp/clean_compile.out"
-
-# source required ci scripts
-source "${CI_DIR}/stage.sh"
-source "${CI_DIR}/maven-utils.sh"
-
-echo "Maven version:"
-run_mvn -version
-
-echo 
"=============================================================================="
-echo "Compiling Flink CDC"
-echo 
"=============================================================================="
-
-EXIT_CODE=0
-
-run_mvn clean package -Dmaven.javadoc.skip=true -U -DskipTests | tee 
$MVN_CLEAN_COMPILE_OUT
-
-EXIT_CODE=${PIPESTATUS[0]}
-
-if [ $EXIT_CODE != 0 ]; then
-    echo 
"=============================================================================="
-    echo "Compiling Flink CDC failed."
-    echo 
"=============================================================================="
-
-    grep "0 Unknown Licenses" target/rat.txt > /dev/null
-
-    if [ $? != 0 ]; then
-        echo "License header check failure detected. Printing first 50 lines 
for convenience:"
-        head -n 50 target/rat.txt
-    fi
-
-    exit $EXIT_CODE
-fi
-
-exit $EXIT_CODE
-
diff --git a/tools/ci/controller_utils.sh b/tools/ci/controller_utils.sh
deleted file mode 100644
index 696f325b2..000000000
--- a/tools/ci/controller_utils.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-print_system_info() {
-    echo "CPU information"
-    lscpu
-
-    echo "Memory information"
-    cat /proc/meminfo
-
-    echo "Disk information"
-    df -hH
-
-    echo "Running build as"
-    whoami
-}
-
-print_stacktraces () {
-       echo 
"=============================================================================="
-       echo "The following Java processes are running (JPS)"
-       echo 
"=============================================================================="
-
-       JAVA_PROCESSES=`jps`
-       echo "$JAVA_PROCESSES"
-
-       local pids=( $(echo "$JAVA_PROCESSES" | awk '{print $1}') )
-
-       for pid in "${pids[@]}"; do
-               echo 
"=============================================================================="
-               echo "Printing stack trace of Java process ${pid}"
-               echo 
"=============================================================================="
-
-               jstack $pid
-       done
-}
-
diff --git a/tools/ci/google-mirror-settings.xml 
b/tools/ci/google-mirror-settings.xml
deleted file mode 100644
index 49e7ccdde..000000000
--- a/tools/ci/google-mirror-settings.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<!--
-Copyright 2023 Ververica Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-<settings>
-  <mirrors>
-    <mirror>
-      <id>google-maven-central</id>
-      <name>GCS Maven Central mirror</name>
-      
<url>https://maven-central-eu.storage-download.googleapis.com/maven2/</url>
-      <mirrorOf>central</mirrorOf>
-    </mirror>
-  </mirrors>
-</settings>
diff --git a/tools/ci/log4j.properties b/tools/ci/log4j.properties
deleted file mode 100644
index 15d17b888..000000000
--- a/tools/ci/log4j.properties
+++ /dev/null
@@ -1,32 +0,0 @@
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#  
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-rootLogger.level=INFO
-rootLogger.appenderRef.out.ref=FileAppender
-# -----------------------------------------------------------------------------
-# Console (use 'console')
-# -----------------------------------------------------------------------------
-appender.console.name=ConsoleAppender
-appender.console.type=CONSOLE
-appender.console.layout.type=PatternLayout
-appender.console.layout.pattern=%d{HH:mm:ss,SSS} [%20t] %-5p %-60c %x - %m%n
-# -----------------------------------------------------------------------------
-# File (use 'file')
-# -----------------------------------------------------------------------------
-appender.file.name=FileAppender
-appender.file.type=FILE
-appender.file.fileName=${sys:log.dir}/mvn-${sys:mvn.forkNumber}.log
-appender.file.layout.type=PatternLayout
-appender.file.layout.pattern=%d{HH:mm:ss,SSS} [%20t] %-5p %-60c %x - %m%n
\ No newline at end of file
diff --git a/tools/ci/maven-utils.sh b/tools/ci/maven-utils.sh
deleted file mode 100755
index 646837ca1..000000000
--- a/tools/ci/maven-utils.sh
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2023 Ververica Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Utility for invoking Maven in CI
-function run_mvn {
-       MVN_CMD="mvn"
-       if [[ "$M2_HOME" != "" ]]; then
-               MVN_CMD="${M2_HOME}/bin/mvn"
-       fi
-
-       ARGS=$@
-       INVOCATION="$MVN_CMD $MVN_GLOBAL_OPTIONS $ARGS"
-       if [[ "$MVN_RUN_VERBOSE" != "false" ]]; then
-               echo "Invoking mvn with '$INVOCATION'"
-       fi
-       eval $INVOCATION
-}
-export -f run_mvn
-
-function setup_maven {
-       set -e # fail if there was an error setting up maven
-       if [ ! -d "${MAVEN_VERSIONED_DIR}" ]; then
-         wget 
https://archive.apache.org/dist/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.zip
-         unzip -d "${MAVEN_CACHE_DIR}" -qq 
"apache-maven-${MAVEN_VERSION}-bin.zip"
-         rm "apache-maven-${MAVEN_VERSION}-bin.zip"
-       fi
-
-       export M2_HOME="${MAVEN_VERSIONED_DIR}"
-       echo "##vso[task.setvariable variable=M2_HOME]$M2_HOME"
-
-       # just in case: clean up the .m2 home and remove invalid jar files
-       if [ -d "${HOME}/.m2/repository/" ]; then
-         find ${HOME}/.m2/repository/ -name "*.jar" -exec sh -c 'if ! zip -T 
{} >/dev/null ; then echo "deleting invalid file: {}"; rm -f {} ; fi' \;
-       fi
-
-       echo "Installed Maven ${MAVEN_VERSION} to ${M2_HOME}"
-       set +e
-}
-
-function set_mirror_config {
-       if [[ "$MAVEN_MIRROR_CONFIG_FILE" != "" ]]; then
-               echo "[WARN] Maven mirror already configured to 
$MAVEN_MIRROR_CONFIG_FILE"
-               exit 0;
-       fi
-
-       echo "Using Google mirror"
-       MAVEN_MIRROR_CONFIG_FILE="$CI_DIR/google-mirror-settings.xml"
-}
-
-function collect_coredumps {
-       local SEARCHDIR=$1
-       local TARGET_DIR=$2
-       echo "Searching for .dump, .dumpstream and related files in 
'$SEARCHDIR'"
-       for file in `find $SEARCHDIR -type f -regextype posix-extended -iregex 
'.*\.hprof|.*\.dump|.*\.dumpstream|.*hs.*\.log|.*/core(.[0-9]+)?$'`; do
-               echo "Moving '$file' to target directory ('$TARGET_DIR')"
-               mv $file $TARGET_DIR/$(echo $file | tr "/" "-")
-       done
-}
-
-function collect_dmesg {
-       local TARGET_DIR=$1
-       dmesg > $TARGET_DIR/dmesg.out
-}
-
-CI_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-
-MAVEN_VERSION="3.2.5"
-MAVEN_CACHE_DIR=${HOME}/maven_cache
-MAVEN_VERSIONED_DIR=${MAVEN_CACHE_DIR}/apache-maven-${MAVEN_VERSION}
-
-
-MAVEN_MIRROR_CONFIG_FILE=""
-set_mirror_config
-
-export MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR=""
-# see 
https://developercommunity.visualstudio.com/content/problem/851041/microsoft-hosted-agents-run-into-maven-central-tim.html
-MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR+="-Dmaven.wagon.http.pool=false "
-# logging 
-MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR+="-Dorg.slf4j.simpleLogger.showDateTime=true 
-Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss.SSS 
-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
 "
-# suppress snapshot updates
-MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR+="--no-snapshot-updates "
-# enable non-interactive batch mode
-MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR+="-B "
-# globally control the build profile details
-MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR+="$PROFILE "
-
-export MVN_GLOBAL_OPTIONS="${MVN_GLOBAL_OPTIONS_WITHOUT_MIRROR} "
-# use google mirror everywhere
-MVN_GLOBAL_OPTIONS+="--settings $MAVEN_MIRROR_CONFIG_FILE "
diff --git a/tools/ci/stage.sh b/tools/ci/stage.sh
deleted file mode 100755
index 869c4d530..000000000
--- a/tools/ci/stage.sh
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-STAGE_CORE="core"
-STAGE_PIPELINE_CONNECTORS="pipeline_connectors"
-STAGE_MYSQL="mysql"
-STAGE_POSTGRES="postgres"
-STAGE_ORACLE="oracle"
-STAGE_MONGODB="mongodb"
-STAGE_SQLSERVER="sqlserver"
-STAGE_TIDB="tidb"
-STAGE_OCEANBASE="oceanbase"
-STAGE_DB2="db2"
-STAGE_VITESS="vitess"
-STAGE_E2E="e2e"
-STAGE_MISC="misc"
-
-MODULES_CORE="\
-flink-cdc-cli,\
-flink-cdc-common,\
-flink-cdc-composer,\
-flink-cdc-runtime"
-
-MODULES_PIPELINE_CONNECTORS="\
-flink-cdc-connect/flink-cdc-pipeline-connectors"
-
-MODULES_MYSQL="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mysql-cdc"
-
-MODULES_POSTGRES="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-postgres-cdc"
-
-MODULES_ORACLE="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oracle-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oracle-cdc"
-
-MODULES_MONGODB="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mongodb-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mongodb-cdc"
-
-MODULES_SQLSERVER="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-sqlserver-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-sqlserver-cdc"
-
-MODULES_TIDB="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-tidb-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-tidb-cdc"
-
-MODULES_OCEANBASE="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oceanbase-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oceanbase-cdc"
-
-MODULES_DB2="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-db2-cdc"
-
-MODULES_VITESS="\
-flink-cdc-connect/flink-cdc-source-connectors/flink-connector-vitess-cdc,\
-flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-vitess-cdc"
-
-MODULES_E2E="\
-flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests,\
-flink-cdc-e2e-tests/flink-cdc-source-e2e-tests"
-
-function get_compile_modules_for_stage() {
-    local stage=$1
-
-    case ${stage} in
-        (${STAGE_CORE})
-            echo "-pl $MODULES_CORE -am"
-        ;;
-        (${STAGE_PIPELINE_CONNECTORS})
-            echo "-pl $MODULES_PIPELINE_CONNECTORS -am"
-        ;;
-        (${STAGE_MYSQL})
-            echo "-pl $MODULES_MYSQL -am"
-        ;;
-        (${STAGE_POSTGRES})
-            echo "-pl $MODULES_POSTGRES -am"
-        ;;
-        (${STAGE_ORACLE})
-            echo "-pl $MODULES_ORACLE -am"
-        ;;
-        (${STAGE_MONGODB})
-            echo "-pl $MODULES_MONGODB -am"
-        ;;
-        (${STAGE_SQLSERVER})
-            echo "-pl $MODULES_SQLSERVER -am"
-        ;;
-        (${STAGE_TIDB})
-            echo "-pl $MODULES_TIDB -am"
-        ;;
-        (${STAGE_OCEANBASE})
-            echo "-pl $MODULES_OCEANBASE -am"
-        ;;
-        (${STAGE_DB2})
-            echo "-pl $MODULES_DB2 -am"
-        ;;
-        (${STAGE_VITESS})
-            echo "-pl $MODULES_VITESS -am"
-        ;;
-        (${STAGE_E2E})
-            # compile everything; using the -am switch does not work with 
negated module lists!
-            # the negation takes precedence, thus not all required modules 
would be built
-            echo ""
-        ;;
-        (${STAGE_MISC})
-            # compile everything; using the -am switch does not work with 
negated module lists!
-            # the negation takes precedence, thus not all required modules 
would be built
-            echo ""
-        ;;
-    esac
-}
-
-function get_test_modules_for_stage() {
-    local stage=$1
-
-    local modules_core=$MODULES_CORE
-    local modules_pipeline_connectors=$MODULES_PIPELINE_CONNECTORS
-    local modules_mysql=$MODULES_MYSQL
-    local modules_postgres=$MODULES_POSTGRES
-    local modules_oracle=$MODULES_ORACLE
-    local modules_mongodb=$MODULES_MONGODB
-    local modules_sqlserver=$MODULES_SQLSERVER
-    local modules_tidb=$MODULES_TIDB
-    local modules_oceanbase=$MODULES_OCEANBASE
-    local modules_db2=$MODULES_DB2
-    local modules_vitess=$MODULES_VITESS
-    local modules_e2e=$MODULES_E2E
-    local negated_mysql=\!${MODULES_MYSQL//,/,\!}
-    local negated_postgres=\!${MODULES_POSTGRES//,/,\!}
-    local negated_oracle=\!${MODULES_ORACLE//,/,\!}
-    local negated_mongodb=\!${MODULES_MONGODB//,/,\!}
-    local negated_sqlserver=\!${MODULES_SQLSERVER//,/,\!}
-    local negated_tidb=\!${MODULES_TIDB//,/,\!}
-    local negated_oceanbase=\!${MODULES_OCEANBASE//,/,\!}
-    local negated_db2=\!${MODULES_DB2//,/,\!}
-    local negated_vitess=\!${MODULES_vitess//,/,\!}
-    local negated_e2e=\!${MODULES_E2E//,/,\!}
-    local 
modules_misc="$negated_mysql,$negated_postgres,$negated_oracle,$negated_mongodb,$negated_sqlserver,$negated_tidb,$negated_oceanbase,$negated_db2,$negated_vitess,$negated_e2e"
-
-    case ${stage} in
-        (${STAGE_CORE})
-            echo "-pl $modules_core"
-        ;;
-        (${STAGE_PIPELINE_CONNECTORS})
-            echo "-pl $modules_pipeline_connectors"
-        ;;
-        (${STAGE_MYSQL})
-            echo "-pl $modules_mysql"
-        ;;
-        (${STAGE_POSTGRES})
-            echo "-pl $modules_postgres"
-        ;;
-        (${STAGE_ORACLE})
-            echo "-pl $modules_oracle"
-        ;;
-        (${STAGE_MONGODB})
-            echo "-pl $modules_mongodb"
-        ;;
-        (${STAGE_SQLSERVER})
-            echo "-pl $modules_sqlserver"
-        ;;
-        (${STAGE_TIDB})
-            echo "-pl $modules_tidb"
-        ;;
-        (${STAGE_OCEANBASE})
-            echo "-pl $modules_oceanbase"
-        ;;
-        (${STAGE_DB2})
-            echo "-pl $modules_db2"
-        ;;
-        (${STAGE_VITESS})
-            echo "-pl $modules_vitess"
-        ;;
-        (${STAGE_E2E})
-            echo "-pl $modules_e2e"
-        ;;
-        (${STAGE_MISC})
-            echo "-pl $modules_misc"
-        ;;
-    esac
-}
diff --git a/tools/ci/test_controller.sh b/tools/ci/test_controller.sh
deleted file mode 100755
index 66de5a186..000000000
--- a/tools/ci/test_controller.sh
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-#
-# This file contains generic control over the test execution.
-#
-
-HERE="`dirname \"$0\"`"             # relative
-HERE="`( cd \"$HERE\" && pwd )`"    # absolutized and normalized
-if [ -z "$HERE" ] ; then
-       exit 1
-fi
-
-source "${HERE}/stage.sh"
-source "${HERE}/maven-utils.sh"
-source "${HERE}/controller_utils.sh"
-
-STAGE=$1
-
-# =============================================================================
-# Step 0: Check & print environment information & configure env
-# =============================================================================
-
-# check preconditions
-if [ -z "$DEBUG_FILES_OUTPUT_DIR" ] ; then
-       echo "ERROR: Environment variable 'DEBUG_FILES_OUTPUT_DIR' is not set 
but expected by test_controller.sh. Tests may use this location to store 
debugging files."
-       exit 1
-fi
-
-if [ ! -d "$DEBUG_FILES_OUTPUT_DIR" ] ; then
-       echo "ERROR: Environment variable 
DEBUG_FILES_OUTPUT_DIR=$DEBUG_FILES_OUTPUT_DIR points to a directory that does 
not exist"
-       exit 1
-fi
-
-if [ -z "$STAGE" ] ; then
-       echo "ERROR: Environment variable 'STAGE' is not set but expected by 
test_controller.sh. The variable refers to the stage being executed."
-       exit 1
-fi
-
-echo "Printing environment information"
-
-echo "PATH=$PATH"
-run_mvn -version
-echo "Commit: $(git rev-parse HEAD)"
-print_system_info
-
-# enable coredumps for this process
-ulimit -c unlimited
-
-# configure JVMs to produce heap dumps
-export JAVA_TOOL_OPTIONS="-XX:+HeapDumpOnOutOfMemoryError"
-
-# some tests provide additional logs if they find this variable
-export IS_CI=true
-
-export WATCHDOG_ADDITIONAL_MONITORING_FILES="$DEBUG_FILES_OUTPUT_DIR/mvn-*.log"
-
-source "${HERE}/watchdog.sh"
-
-# =============================================================================
-# Step 1: Rebuild jars and install Flink to local maven repository
-# =============================================================================
-
-LOG4J_PROPERTIES=${HERE}/log4j.properties
-MVN_LOGGING_OPTIONS="-Dlog.dir=${DEBUG_FILES_OUTPUT_DIR} 
-Dlog4j.configurationFile=file://$LOG4J_PROPERTIES"
-
-MVN_COMMON_OPTIONS="-Dfast $MVN_LOGGING_OPTIONS"
-MVN_COMPILE_OPTIONS="-DskipTests"
-MVN_COMPILE_MODULES=$(get_compile_modules_for_stage ${STAGE})
-
-CALLBACK_ON_TIMEOUT="print_stacktraces | tee 
${DEBUG_FILES_OUTPUT_DIR}/jps-traces.out"
-run_with_watchdog "run_mvn $MVN_COMMON_OPTIONS $MVN_COMPILE_OPTIONS $PROFILE 
$MVN_COMPILE_MODULES install" $CALLBACK_ON_TIMEOUT
-EXIT_CODE=$?
-
-if [ $EXIT_CODE != 0 ]; then
-       echo 
"=============================================================================="
-       echo "Compilation failure detected, skipping test execution."
-       echo 
"=============================================================================="
-       exit $EXIT_CODE
-fi
-
-
-# =============================================================================
-# Step 2: Run tests
-# =============================================================================
-
-MVN_TEST_MODULES=$(get_test_modules_for_stage ${STAGE})
-
-run_with_watchdog "run_mvn $MVN_COMMON_OPTIONS $PROFILE $MVN_TEST_MODULES 
verify" $CALLBACK_ON_TIMEOUT
-EXIT_CODE=$?
-
-# =============================================================================
-# Step 3: Put extra logs into $DEBUG_FILES_OUTPUT_DIR
-# =============================================================================
-
-collect_coredumps $(pwd) $DEBUG_FILES_OUTPUT_DIR
-
-# Exit code for CI build success/failure
-exit $EXIT_CODE
diff --git a/tools/ci/watchdog.sh b/tools/ci/watchdog.sh
deleted file mode 100755
index 5876db190..000000000
--- a/tools/ci/watchdog.sh
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env bash
-################################################################################
-#  Copyright 2023 Ververica Inc.
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-#
-# This file contains a watchdog tool to monitor a task and potentially kill it 
after
-# not producing any output for $MAX_NO_OUTPUT seconds.
-#
-
-# Number of seconds w/o output before printing a stack trace and killing the 
watched process
-MAX_NO_OUTPUT=${MAX_NO_OUTPUT:-900}
-
-# Number of seconds to sleep before checking the output again
-SLEEP_TIME=${SLEEP_TIME:-20}
-
-# Internal fields
-CMD_OUT="/tmp/watchdog.out"
-CMD_PID="/tmp/watchdog.pid"
-CMD_EXIT="/tmp/watchdog.exit"
-
-
-# =============================================
-# Utility functions
-# ============================================= 
-
-max_of() {
-  local max number
-
-  max="$1"
-
-  for number in "${@:2}"; do
-    if ((number > max)); then
-      max="$number"
-    fi
-  done
-
-  printf '%d\n' "$max"
-}
-
-# Returns the highest modification time out of $CMD_OUT (which is the command 
output file)
-# and any file(s) named "mvn-*.log" (which are logging files created by 
Flink's tests)
-mod_time () {
-       CMD_OUT_MOD_TIME=`stat -c "%Y" $CMD_OUT`
-       ADDITIONAL_FILES_MOD_TIMES=`stat -c "%Y" 
$WATCHDOG_ADDITIONAL_MONITORING_FILES 2> /dev/null`
-       echo `max_of $CMD_OUT_MOD_TIME $ADDITIONAL_FILES_MOD_TIMES`
-}
-
-the_time() {
-       echo `date +%s`
-}
-
-# watchdog process
-
-watchdog () {
-       touch $CMD_OUT
-
-       while true; do
-               sleep $SLEEP_TIME
-
-               time_diff=$((`the_time` - `mod_time`))
-
-               if [ $time_diff -ge $MAX_NO_OUTPUT ]; then
-                       echo 
"=============================================================================="
-                       echo "Process produced no output for ${MAX_NO_OUTPUT} 
seconds."
-                       echo 
"=============================================================================="
-
-                       # run timeout callback
-                       $CALLBACK_ON_TIMEOUT
-
-                       echo "Killing process with pid=$(<$CMD_PID) and all 
descendants"
-                       pkill -P $(<$CMD_PID) # kill descendants
-                       kill $(<$CMD_PID) # kill process itself
-
-                       exit 1
-               fi
-       done
-}
-
-
-# =============================================
-# main function
-# =============================================
-
-# entrypoint
-function run_with_watchdog() {
-       local cmd="$1"
-       local CALLBACK_ON_TIMEOUT="$2"
-
-       watchdog &
-       WD_PID=$!
-       echo "STARTED watchdog (${WD_PID})."
-
-       echo "RUNNING '${cmd}'."
-
-       # Run $CMD and pipe output to $CMD_OUT for the watchdog. The PID is 
written to $CMD_PID to
-       # allow the watchdog to kill $CMD if it is not producing any output 
anymore. $CMD_EXIT contains
-       # the exit code. This is important for CI build life-cycle 
(success/failure).
-       ( $cmd & PID=$! ; echo $PID >&3 ; wait $PID ; echo $? >&4 ) 3>$CMD_PID 
4>$CMD_EXIT | tee $CMD_OUT
-
-       EXIT_CODE=$(<$CMD_EXIT)
-
-       echo "Process exited with EXIT CODE: ${EXIT_CODE}."
-
-       # Make sure to kill the watchdog in any case after $CMD has completed
-       echo "Trying to KILL watchdog (${WD_PID})."
-       ( kill $WD_PID 2>&1 ) > /dev/null
-
-       rm $CMD_PID
-       rm $CMD_EXIT
-
-       return $EXIT_CODE
-}
-
-

Reply via email to