This is an automated email from the ASF dual-hosted git repository.

jrmccluskey pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 405d2dbb67c Clean up unused Jenkins files (#37148)
405d2dbb67c is described below

commit 405d2dbb67cd8980593dfcb0a73f7fac7dd189ac
Author: Jack McCluskey <[email protected]>
AuthorDate: Thu Dec 18 15:10:14 2025 -0500

    Clean up unused Jenkins files (#37148)
    
    * Clean up unused Jenkins files
    
    * Update contributor-docs/committer-guide.md
    
    Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
    
    ---------
    
    Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
---
 .test-infra/jenkins/Committers.groovy              | 109 ----
 .test-infra/jenkins/CommonJobProperties.groovy     | 318 -----------
 .test-infra/jenkins/CommonTestProperties.groovy    |  84 ---
 .test-infra/jenkins/CronJobBuilder.groovy          |  52 --
 .../jenkins/InfluxDBCredentialsHelper.groovy       |  37 --
 .test-infra/jenkins/JavaTestProperties.groovy      |  26 -
 .test-infra/jenkins/Kubernetes.groovy              | 165 ------
 .test-infra/jenkins/LoadTestConfig.groovy          | 636 ---------------------
 .test-infra/jenkins/LoadTestsBuilder.groovy        | 136 -----
 .test-infra/jenkins/NexmarkBuilder.groovy          | 260 ---------
 .../jenkins/NexmarkDatabaseProperties.groovy       |  40 --
 .../NoPhraseTriggeringPostCommitBuilder.groovy     |  31 -
 .../PhraseTriggeringPostCommitBuilder.groovy       |  34 --
 .test-infra/jenkins/PostcommitJobBuilder.groovy    |  94 ---
 .test-infra/jenkins/PrecommitJobBuilder.groovy     | 163 ------
 .test-infra/jenkins/PythonTestProperties.groovy    |  46 --
 .test-infra/jenkins/README.md                      |  26 -
 .test-infra/jenkins/TpcdsDatabaseProperties.groovy |  42 --
 .test-infra/jenkins/build.gradle                   |  48 --
 .../jenkins/metrics_report/dashboards_parser.py    | 115 ----
 .../jenkins/metrics_report/report_generator.py     | 230 --------
 .../jenkins/metrics_report/requirements.txt        |  24 -
 .../templates/Metrics_Report.template              | 141 -----
 .test-infra/jenkins/metrics_report/tox.ini         |  36 --
 contributor-docs/committer-guide.md                |   9 -
 settings.gradle.kts                                |   2 -
 .../site/content/en/documentation/io/testing.md    |   8 -
 27 files changed, 2912 deletions(-)

diff --git a/.test-infra/jenkins/Committers.groovy b/.test-infra/jenkins/Committers.groovy
deleted file mode 100644
index fdbb6150e5f..00000000000
--- a/.test-infra/jenkins/Committers.groovy
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * This is used to populate the list of allowed people that can trigger the jobs
- * that are not allowed to be triggered by non-committers from GitHub pull requests.
- */
-
-class Committers {
-  final static List GITHUB_USERNAMES = [
-    "suztomo",
-    "bjchambers",
-    "angoenka",
-    "ihji",
-    "aljoscha",
-    "iemejia",
-    "udim",
-    "jbonofre",
-    "timrobertson100",
-    "tweise",
-    "dmvk",
-    "jkff",
-    "xumingming",
-    "tgroh",
-    "kanterov",
-    "robertwb",
-    "dhalperi",
-    "jwills",
-    "kennknowles",
-    "alexvanboxel",
-    "swegner",
-    "TheNeuralBit",
-    "aaltay",
-    "damondouglas",
-    "mxm",
-    "griscz",
-    "charlesccychen",
-    "manuzhang",
-    "pabloem",
-    "mosche",
-    "StephanEwen",
-    "youngoli",
-    "steveniemitz",
-    "lgajowy",
-    "amaliujia",
-    "jasonkuster",
-    "kileys",
-    "kkucharc",
-    "emilymye",
-    "markflyhigh",
-    "KevinGG",
-    "matthiasa4",
-    "brucearctor",
-    "alanmyrvold",
-    "y1chi",
-    "aviemzur",
-    "apilloud",
-    "kw2542",
-    "rezarokni",
-    "egalpin",
-    "Abacn",
-    "davorbonaci",
-    "echauchot",
-    "tvalentyn",
-    "JingsongLi",
-    "lukecwik",
-    "robinyqiu",
-    "chamikaramj",
-    "Ardagan",
-    "lostluck",
-    "je-ik",
-    "herohde",
-    "aijamalnk",
-    "Hannah-Jiang",
-    "ibzib",
-    "kamilwu",
-    "melap",
-    "reuvenlax",
-    "sunjincheng121",
-    "xinyuiscool",
-    "adude3141",
-    "riteshghorse",
-    "mwalenia",
-    "akedin",
-    "aromanenko-dev",
-    "AnandInguva",
-    "jrmccluskey",
-    "yifanzou",
-    "boyuanzz",
-    "damccorm",
-    "johnjcasey"
-  ]
-}
diff --git a/.test-infra/jenkins/CommonJobProperties.groovy b/.test-infra/jenkins/CommonJobProperties.groovy
deleted file mode 100644
index 0f63b5de49f..00000000000
--- a/.test-infra/jenkins/CommonJobProperties.groovy
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Contains functions that help build Jenkins projects. Functions typically set
-// common properties that are shared among all Jenkins projects.
-// Code in this directory should conform to the Groovy style guide.
-//  http://groovy-lang.org/style-guide.html
-
-import Committers as committers
-import PythonTestProperties as pythonTestProperties
-
-class CommonJobProperties {
-
-  static String checkoutDir = 'src'
-  final static String JAVA_8_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
-  final static String JAVA_11_HOME = '/usr/lib/jvm/java-11-openjdk-amd64'
-  final static String JAVA_17_HOME = '/usr/lib/jvm/java-17-openjdk-amd64'
-  final static String PYTHON = pythonTestProperties.DEFAULT_INTERPRETER
-
-  // Sets common top-level job properties for main repository jobs.
-  static void setTopLevelMainJobProperties(def context,
-      String defaultBranch = 'master',
-      int defaultTimeout = 100,
-      boolean allowRemotePoll = true,
-      String jenkinsExecutorLabel = 'beam',
-      boolean cleanWorkspace = true,
-      int numBuildsToRetain = -1) {
-    // GitHub project.
-    context.properties {
-      githubProjectUrl('https://github.com/apache/beam/')
-    }
-
-    // Set JDK version.
-    context.jdk('jdk_1.8_latest')
-
-    // Restrict this project to run only on Jenkins executors as specified
-    context.label(jenkinsExecutorLabel)
-
-    // Discard old builds. Build records are only kept up to this number of days.
-    context.logRotator {
-      daysToKeep(30)
-      numToKeep(numBuildsToRetain)
-    }
-
-    // Source code management.
-    context.scm {
-      git {
-        remote {
-          github("apache/beam")
-          // Single quotes here mean that ${ghprbPullId} is not interpolated and instead passed
-          // through to Jenkins where it refers to the environment variable.
-          refspec('+refs/heads/*:refs/remotes/origin/* ' +
-              '+refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*')
-        }
-        branch('${sha1}')
-        extensions {
-          wipeOutWorkspace()
-          relativeTargetDirectory(checkoutDir)
-          cloneOptions {
-            shallow()
-            noTags()
-          }
-          if (!allowRemotePoll) {
-            disableRemotePoll()
-          }
-        }
-      }
-    }
-
-    context.parameters {
-      // This is a recommended setup if you want to run the job manually. The
-      // ${sha1} parameter needs to be provided, and defaults to the main branch.
-      stringParam(
-          'sha1',
-          defaultBranch,
-          'Commit id or refname (eg: origin/pr/9/head) you want to build.')
-    }
-
-    context.wrappers {
-      // Abort the build if it's stuck for more minutes than specified.
-      timeout {
-        absolute(defaultTimeout)
-        abortBuild()
-      }
-
-      environmentVariables {
-        // Set SPARK_LOCAL_IP for spark tests.
-        env('SPARK_LOCAL_IP', '127.0.0.1')
-      }
-      credentialsBinding {
-        string("CODECOV_TOKEN", "beam-codecov-token")
-        string("COVERALLS_REPO_TOKEN", "beam-coveralls-token")
-        usernamePassword("GRADLE_ENTERPRISE_CACHE_USERNAME", 
"GRADLE_ENTERPRISE_CACHE_PASSWORD", "beam_cache_node_credentials")
-      }
-      timestamps()
-      colorizeOutput()
-    }
-
-    if (cleanWorkspace) {
-      context.publishers {
-        // Clean after job completes.
-        wsCleanup()
-      }
-    }
-  }
-
-  // Sets the pull request build trigger. Accessed through precommit methods
-  // below to insulate callers from internal parameter defaults.
-  static void setPullRequestBuildTrigger(context,
-      String commitStatusContext,
-      String prTriggerPhrase = '',
-      boolean onlyTriggerPhraseToggle = true,
-      boolean prPermitAll = true,
-      List<String> triggerPathPatterns = [],
-      List<String> excludePathPatterns = []) {
-    context.triggers {
-      githubPullRequest {
-        admins(['asfbot'])
-        useGitHubHooks()
-        permitAll(prPermitAll)
-        if (!prPermitAll) {
-          userWhitelist(committers.GITHUB_USERNAMES)
-        }
-        // prTriggerPhrase is the argument which gets set when we want to allow
-        // post-commit builds to run against pending pull requests. This block
-        // overrides the default trigger phrase with the new one. Setting this
-        // will disable automatic invocation of this build; the phrase will be
-        // required to start it.
-        if (prTriggerPhrase) {
-          triggerPhrase(prTriggerPhrase)
-        }
-        if (onlyTriggerPhraseToggle) {
-          onlyTriggerPhrase()
-        }
-        if (!triggerPathPatterns.isEmpty()) {
-          includedRegions(triggerPathPatterns.join('\n'))
-        }
-        if (!excludePathPatterns.isEmpty()) {
-          excludedRegions(excludePathPatterns)
-        }
-
-        extensions {
-          commitStatus {
-            // This is the name that will show up in the GitHub pull request UI
-            // for this Jenkins project. It has a limit of 255 characters.
-            delegate.context commitStatusContext.take(255)
-          }
-
-          // Comment messages after build completes.
-          buildStatus {
-            completedStatus('SUCCESS', '--none--')
-            completedStatus('FAILURE', '--none--')
-            completedStatus('ERROR', '--none--')
-          }
-        }
-      }
-    }
-  }
-
-  // Default maxWorkers is 12 to avoid jvm oom as in [BEAM-4847].
-  static void setGradleSwitches(context, maxWorkers = 8) {
-    def defaultSwitches = [
-      // Continue the build even if there is a failure to show as many potential failures as possible.
-      '--continue',
-    ]
-
-    for (String gradle_switch : defaultSwitches) {
-      context.switches(gradle_switch)
-    }
-    context.switches("--max-workers=${maxWorkers}")
-
-    // Ensure that parallel workers don't exceed total available memory.
-
-    // Workers are n1-highmem-16 with 104GB
-    // 2 Jenkins executors * 8 Gradle workers * 6GB = 96GB
-    context.switches("-Dorg.gradle.jvmargs=-Xms2g")
-    context.switches("-Dorg.gradle.jvmargs=-Xmx6g")
-
-    // Disable file system watching for CI builds
-    // Builds are performed on a clean clone and files aren't modified, so
-    // there's no value in watching for changes.
-    context.switches("-Dorg.gradle.vfs.watch=false")
-
-    // Include dependency licenses when build docker images on Jenkins, see https://s.apache.org/zt68q
-    context.switches("-Pdocker-pull-licenses")
-  }
-
-  // Enable triggering postcommit runs against pull requests. Users can comment the trigger phrase
-  // specified in the postcommit job and have the job run against their PR to run
-  // tests not in the presubmit suite for additional confidence.
-  static void enablePhraseTriggeringFromPullRequest(context,
-      String commitStatusName,
-      String prTriggerPhrase,
-      boolean prPermitAll = true) {
-    setPullRequestBuildTrigger(
-        context,
-        commitStatusName,
-        prTriggerPhrase,
-        true,
-        prPermitAll)
-  }
-
-  // Sets this as a cron job, running on a schedule.
-  static void setCronJob(context, String buildSchedule) {
-    context.triggers {
-      cron(buildSchedule)
-    }
-  }
-
-  // Sets common config for jobs which run on a schedule; optionally on push
-  static void setAutoJob(context,
-      String buildSchedule = 'H H/6 * * *',
-      notifyAddress = '[email protected]',
-      emailIndividuals = false) {
-
-    // Set build triggers
-    context.triggers {
-      // By default runs every 6 hours.
-      cron(buildSchedule)
-    }
-
-    context.publishers {
-      // Notify an email address for each failed build (defaults to builds@).
-      mailer(
-          notifyAddress,
-          /* _do_ notify every unstable build */ false,
-          /* do not email individuals */ false)
-
-      extendedEmail {
-        triggers {
-          aborted {
-            recipientList(notifyAddress)
-          }
-          if (emailIndividuals) {
-            firstFailure {
-              sendTo {
-                firstFailingBuildSuspects()
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-
-  static def mapToArgString(LinkedHashMap<String, String> inputArgs) {
-    List argList = []
-    inputArgs.each({ // FYI: Replacement only works with double quotes.
-      key, value ->
-      argList.add("--$key=$value")
-    })
-    return argList.join(' ')
-  }
-
-  // Namespace must contain lower case alphanumeric characters or '-'
-  static String getKubernetesNamespace(def jobName) {
-    jobName = jobName.replaceAll("_", "-").toLowerCase()
-    return "${jobName}-\${BUILD_ID}"
-  }
-
-  static String getKubeconfigLocationForNamespace(def namespace) {
-    return '$WORKSPACE/' + "config-${namespace}"
-  }
-
-  /**
-   * Transforms pipeline options to a string of format like below:
-   * ["--pipelineOption=123", "--pipelineOption2=abc", ...]
-   *
-   * @param pipelineOptions A map of pipeline options.
-   */
-  static String joinPipelineOptions(Map pipelineOptions) {
-    List<String> pipelineArgList = []
-    pipelineOptions.each({ key, value ->
-      pipelineArgList.add("\"--$key=$value\"")
-    })
-    return "[" + pipelineArgList.join(',') + "]"
-  }
-
-  /**
-   * Transforms pipeline options to a string of format like below:
-   * ["--pipelineOption=123", "--pipelineOption2=abc", ...]
-   *
-   * Use this variant when some options values contain json as string.
-   *
-   * @param pipelineOptions A map of pipeline options.
-   */
-  static String joinOptionsWithNestedJsonValues(Map pipelineOptions) {
-    List<String> pipelineArgList = []
-    pipelineOptions.each({ key, value ->
-      pipelineArgList.add("\"--$key=${value.replaceAll("\"", "\\\\\\\\\"")}\"")
-    })
-    return "[" + pipelineArgList.join(',') + "]"
-  }
-
-
-  /**
-   * Returns absolute path to beam project's files.
-   * @param path A relative path to project resource.
-   */
-  static String makePathAbsolute(String path) {
-    return '"$WORKSPACE/' + path + '"'
-  }
-}
diff --git a/.test-infra/jenkins/CommonTestProperties.groovy b/.test-infra/jenkins/CommonTestProperties.groovy
deleted file mode 100644
index 0670b96ef47..00000000000
--- a/.test-infra/jenkins/CommonTestProperties.groovy
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-
-class CommonTestProperties {
-  enum SDK {
-    PYTHON,
-    JAVA,
-    GO,
-  }
-
-  static String getFlinkVersion() {
-    return "1.17"
-  }
-
-  static String getSparkVersion() {
-    return "3"
-  }
-
-  enum Runner {
-    DATAFLOW("DataflowRunner"),
-    TEST_DATAFLOW("TestDataflowRunner"),
-    SPARK("SparkRunner"),
-    SPARK_STRUCTURED_STREAMING("SparkStructuredStreamingRunner"),
-    FLINK("FlinkRunner"),
-    DIRECT("DirectRunner"),
-    PORTABLE("PortableRunner")
-
-    def RUNNER_DEPENDENCY_MAP = [
-      JAVA: [
-        DATAFLOW: ":runners:google-cloud-dataflow-java",
-        TEST_DATAFLOW: ":runners:google-cloud-dataflow-java",
-        SPARK: ":runners:spark:${CommonTestProperties.getSparkVersion()}",
-        SPARK_STRUCTURED_STREAMING: ":runners:spark:${CommonTestProperties.getSparkVersion()}",
-        FLINK: ":runners:flink:${CommonTestProperties.getFlinkVersion()}",
-        DIRECT: ":runners:direct-java"
-      ],
-      PYTHON: [
-        DATAFLOW: "DataflowRunner",
-        TEST_DATAFLOW: "TestDataflowRunner",
-        DIRECT: "DirectRunner",
-        PORTABLE: "PortableRunner"
-      ],
-      GO: [
-        DATAFLOW: "DataflowRunner",
-        SPARK: "SparkRunner",
-        FLINK: "FlinkRunner",
-        DIRECT: "DirectRunner",
-        PORTABLE: "PortableRunner",
-      ],
-    ]
-
-    private final String option
-
-    Runner(String option) {
-      this.option = option
-    }
-
-    String getDependencyBySDK(SDK sdk) {
-      RUNNER_DEPENDENCY_MAP.get(sdk.toString()).get(this.toString())
-    }
-  }
-
-  enum TriggeringContext {
-    PR,
-    POST_COMMIT
-  }
-}
diff --git a/.test-infra/jenkins/CronJobBuilder.groovy b/.test-infra/jenkins/CronJobBuilder.groovy
deleted file mode 100644
index b363168631b..00000000000
--- a/.test-infra/jenkins/CronJobBuilder.groovy
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import CommonJobProperties as commonJobProperties
-
-/**
- * Use this class to define jobs that are triggered only using cron.
- */
-class CronJobBuilder {
-  private def scope
-  private def jobDefinition
-
-  CronJobBuilder(scope, jobDefinition = {}) {
-    this.scope = scope
-    this.jobDefinition = jobDefinition
-  }
-
-  /**
-   * Set the job details.
-   *
-   * @param nameBase Job name
-   * @param scope Delegate for the job.
-   * @param cronPattern Defines when the job should be fired. Default: "every 6th hour".
-   * @param jobDefinition Closure for the job.
-   */
-  static void cronJob(nameBase, cronPattern = 'H H/6 * * *', scope, jobDefinition = {}) {
-    CronJobBuilder builder = new CronJobBuilder(scope, jobDefinition)
-    builder.defineAutoPostCommitJob(nameBase, cronPattern)
-  }
-
-  void defineAutoPostCommitJob(name, cronPattern) {
-    def autoBuilds = scope.job(name) {
-      commonJobProperties.setAutoJob(delegate, cronPattern, '[email protected]', true)
-    }
-
-    autoBuilds.with(jobDefinition)
-  }
-}
diff --git a/.test-infra/jenkins/InfluxDBCredentialsHelper.groovy b/.test-infra/jenkins/InfluxDBCredentialsHelper.groovy
deleted file mode 100644
index f03fcde4312..00000000000
--- a/.test-infra/jenkins/InfluxDBCredentialsHelper.groovy
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-class InfluxDBCredentialsHelper {
-  final static String InfluxDBHost = '10.128.0.96'
-  final static String InfluxDBPort = '8086'
-  final static String InfluxDBHostUrl = 'http://' + InfluxDBHost + ':' + InfluxDBPort
-  final static String InfluxDBDatabaseName = 'beam_test_metrics'
-
-  /**
-   * Binds InfluxDB user's credentials to environment variables.
-   *
-   * @param job - jenkins job.
-   */
-  public static void useCredentials(job) {
-    job.wrappers {
-      credentialsBinding {
-        usernamePassword('INFLUXDB_USER', 'INFLUXDB_USER_PASSWORD', 'beam-influxdb-user-creds')
-      }
-    }
-  }
-}
diff --git a/.test-infra/jenkins/JavaTestProperties.groovy b/.test-infra/jenkins/JavaTestProperties.groovy
deleted file mode 100644
index 5403cee5cf9..00000000000
--- a/.test-infra/jenkins/JavaTestProperties.groovy
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-class JavaTestProperties {
-  final static List<String> SUPPORTED_CONTAINER_TASKS = [
-    'java8',
-    'java11',
-    'java17',
-    'java21'
-  ]
-}
diff --git a/.test-infra/jenkins/Kubernetes.groovy b/.test-infra/jenkins/Kubernetes.groovy
deleted file mode 100644
index 957c823cbd8..00000000000
--- a/.test-infra/jenkins/Kubernetes.groovy
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/** Facilitates creation of jenkins steps to setup and cleanup Kubernetes infrastructure. */
-class Kubernetes {
-
-  private static final String KUBERNETES_DIR = '"$WORKSPACE/src/.test-infra/kubernetes"'
-
-  private static final String KUBERNETES_SCRIPT = "${KUBERNETES_DIR}/kubernetes.sh"
-
-  private static final String DEFAULT_CLUSTER = 'io-datastores'
-
-  private static def job
-
-  private static String kubeconfigLocation
-
-  private static String namespace
-
-  private static String cluster
-
-  private Kubernetes(job, String kubeconfigLocation, String namespace, String cluster) {
-    this.job = job
-    this.kubeconfigLocation = kubeconfigLocation
-    this.namespace = namespace
-    this.cluster = cluster
-  }
-
-  /**
-   * Creates separate kubeconfig, kubernetes namespace and specifies related cleanup steps.
-   *
-   * @param job - jenkins job
-   * @param kubeconfigLocation - place where kubeconfig will be created
-   * @param namespace - kubernetes namespace. If empty, the default namespace will be used
-   * @param cluster - name of the cluster to get credentials for
-   */
-  static Kubernetes create(job, String kubeconfigLocation, String namespace = '',
-      String cluster = DEFAULT_CLUSTER) {
-    Kubernetes kubernetes = new Kubernetes(job, kubeconfigLocation, namespace, cluster)
-    setupKubeconfig()
-    setupNamespace()
-    addCleanupSteps()
-    return kubernetes
-  }
-
-  private static void setupKubeconfig() {
-    job.steps {
-      shell("gcloud container clusters get-credentials ${cluster} 
--zone=us-central1-a")
-      shell("cp /home/jenkins/.kube/config ${kubeconfigLocation}")
-      environmentVariables {
-        env('KUBECONFIG', kubeconfigLocation)
-      }
-    }
-  }
-
-  private static void setupNamespace() {
-    if (!namespace.isEmpty()) {
-      job.steps {
-        shell("${KUBERNETES_SCRIPT} createNamespace ${namespace}")
-        environmentVariables {
-          env('KUBERNETES_NAMESPACE', namespace)
-        }
-      }
-    }
-  }
-
-  private static void addCleanupSteps() {
-    job.publishers {
-      postBuildScript {
-        buildSteps {
-          postBuildStep {
-            stopOnFailure(false)
-            results([
-              'FAILURE',
-              'SUCCESS',
-              'UNSTABLE',
-              'NOT_BUILT',
-              'ABORTED'
-            ])
-            buildSteps {
-              if (!namespace.isEmpty()) {
-                shell {
-                  command("${KUBERNETES_SCRIPT} deleteNamespace ${namespace}")
-                }
-              }
-              shell {
-                command("rm ${kubeconfigLocation}")
-              }
-            }
-          }
-        }
-        markBuildUnstable(false)
-      }
-    }
-  }
-
-  /**
-   * Specifies steps to run Kubernetes .yaml script.
-   */
-  void apply(String pathToScript) {
-    job.steps {
-      shell("${KUBERNETES_SCRIPT} apply ${pathToScript}")
-    }
-  }
-
-  /**
-   * Specifies steps that will save specified load balancer service address
-   * as an environment variable that can be used in later steps if needed.
-   *
-   * @param serviceName - name of the load balancer Kubernetes service
-   * @param referenceName - name of the environment variable
-   */
-  void loadBalancerIP(String serviceName, String referenceName) {
-    job.steps {
-      String command = "${KUBERNETES_SCRIPT} loadBalancerIP ${serviceName}"
-      shell("set -eo pipefail; eval ${command} | sed 's/^/${referenceName}=/' 
> job.properties")
-      environmentVariables {
-        propertiesFile('job.properties')
-      }
-    }
-  }
-
-  /**
-   * Specifies steps that will return an available port on the Kubernetes cluster,
-   * the value of the available port will be stored in job.properties using referenceName as key
-   *
-   * @param lowRangePort - low range port to be used
-   * @param highRangePort - high range port to be used
-   * @param referenceName - name of the environment variable
-   */
-  void availablePort(String lowRangePort, String highRangePort, String referenceName) {
-    job.steps {
-      String command = "${KUBERNETES_SCRIPT} getAvailablePort ${lowRangePort} 
${highRangePort}"
-      shell("set -xo pipefail; eval ${command} | sed 's/^/${referenceName}=/' 
> job.properties")
-      environmentVariables {
-        propertiesFile('job.properties')
-      }
-    }
-  }
-
-  /**
-   * Specifies steps to wait until a job finishes
-   * @param jobName - job running in Kubernetes cluster
-   * @param timeout - max time to wait for job to finish
-   */
-  void waitForJob(String jobName, String timeout){
-    job.steps{
-      String command="${KUBERNETES_SCRIPT} waitForJob ${jobName} ${timeout}"
-      shell("eval ${command}")
-    }
-  }
-}
diff --git a/.test-infra/jenkins/LoadTestConfig.groovy b/.test-infra/jenkins/LoadTestConfig.groovy
deleted file mode 100644
index 55e355ec7a1..00000000000
--- a/.test-infra/jenkins/LoadTestConfig.groovy
+++ /dev/null
@@ -1,636 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import CommonTestProperties.Runner
-import CommonTestProperties.SDK
-import LoadTestConfig.SerializableOption
-import groovy.json.JsonBuilder
-import org.codehaus.groovy.runtime.InvokerHelper
-
-import java.util.function.Predicate
-
-import static java.util.Objects.nonNull
-import static java.util.Objects.requireNonNull
-
-/**
- * This class contains simple DSL for load tests configuration. Configuration as Map<String, Serializable>
- * [{@link LoadTestConfig#config config} -- returns configuration map]
- * [{@link LoadTestConfig#templateConfig templateConfig} -- return LoadTestConfig reusable object]
- * [{@link LoadTestConfig#fromTemplate fromTemplate} -- returns configuration from given template].<br><br>
- *
- * Example:
- * <blockquote><pre>
- * LoadTestConfig template = templateConfig {
- *     title 'Load test'
- *     test 'org.apache.beam.sdk.loadtests.SomeLoadTests'
- *     dataflow()
- *     pipelineOptions {
- *         python()
- *         jobName 'Any job name'
- *         //other fields
- *     }
- *     specificParameters([
- *          fanout: 4
- *     ])
- * }
- * Map<String, Serializable> configMap = fromTemplate(template) {
- *     //fields can be changed or/and added
- *     portable()
- *     pipelineOptions {
- *         parallelism 5
- *         inputOptions {
- *             numRecords 20000
- *             keySize 1000
- *             valueSize 10
- *         }
- *     }
- * }
- * </pre></blockquote>
- */
-class LoadTestConfig implements SerializableOption<Map<String, Serializable>> {
-
-  private String _title
-  private String _test
-  private Runner _runner
-  private PipelineOptions _pipelineOptions
-
-  private LoadTestConfig() {}
-
-  void title(final String title) {
-    _title = title
-  }
-  void test(final String test) {
-    _test = test
-  }
-
-  //runners
-  void dataflow() { setRunnerAndUpdatePipelineOptions(Runner.DATAFLOW)}
-  void portable() { setRunnerAndUpdatePipelineOptions(Runner.PORTABLE) }
-
-  private void setRunnerAndUpdatePipelineOptions(final Runner runner) {
-    _runner = runner
-    final def pipeline = _pipelineOptions ?: new PipelineOptions()
-    pipeline.i_runner = runner
-    _pipelineOptions = pipeline
-  }
-
-  void pipelineOptions(final Closure cl = {}) {
-    final def options = _pipelineOptions ?: new PipelineOptions()
-    delegateAndInvoke(options, cl)
-    _pipelineOptions = options
-  }
-
-  /**
-   * Returns load test config object which can be reusable.</br>
-   * All possible fields that can be set:
-   * <blockquote><pre>
-   * templateConfig {
-   *     title        [String]
-   *     test         [String]
-   *     [dataflow(), portable()] -- runner
-   *     pipelineOptions {
-   *         [python(), java()] -- sdk
-   *         jobName                  [String]
-   *         appName                  [String]
-   *         project                  [String]
-   *         metricsDataset (python)  [String]
-   *         metricsTable (python)    [String]
-   *         numWorkers               [int]
-   *         parallelism              [int]
-   *         tempLocation             [String]
-   *         autoscalingAlgorithm     [String]
-   *         jobEndpoint              [String]
-   *         environmentType          [String]
-   *         environmentConfig        [String]
-   *         inputOptions/coInputOptions (for python) {
-   *             numRecords           [int]
-   *             keySize              [int]
-   *             valueSize            [int]
-   *             numHotKeys           [int]
-   *             hotKeyFraction       [int]
-   *         }
-   *         sourceOptions/coSourceOptions (for java) {
-   *             numRecords           [int]
-   *             keySizeBytes         [int]
-   *             valueSizeBytes       [int]
-   *             numHotKeys           [int]
-   *             hotKeyFraction       [int]
-   *             splitPointFrequencyRecords       [int]
-   *         }
-   *         stepOptions {
-   *             outputRecordsPerInputRecord      [int]
-   *             preservesInputKeyDistribution    [boolean]
-   *         }
-   *         specificParameters       [Map<String, Object>]
-   *     }
-   * }
-   * </pre></blockquote>
-   * @param cl Closure with fields setting
-   * @return LoadTestConfig object
-   */
-  static LoadTestConfig templateConfig(final Closure cl = {}) {
-    final def config = new LoadTestConfig()
-    delegateAndInvoke(config, cl)
-    return config
-  }
-
-  /**
-   * Returns configuration map from given template. Any field can be changed or/and added. Validation is performed
-   * before final map is returned (ex. Flink runner requires <b>environmentConfig</b> to be set). In case of
-   * validation failure exception is thrown.<br>
-   * Example result:
-   *<blockquote><pre>
-   * [
-   *  title          : 'any given title',
-   *  test           : 'org.apache.beam.sdk.loadtests.SomeLoadTests',
-   *  runner         : CommonTestProperties.Runner.DATAFLOW,
-   *  pipelineOptions: [
-   *    job_name            : 'any given job name',
-   *    publish_to_big_query: true,
-   *    project             : 'apache-beam-testing',
-   *    metrics_dataset     : 'given_dataset_name',
-   *    metrics_table       : 'given_table_name',
-   *    input_options       : '\'{"num_records": 200000000,"key_size": 1,"value_size":9}\'',
-   *    iterations          : 1,
-   *    fanout              : 1,
-   *    parallelism         : 5,
-   *    job_endpoint        : 'localhost:1234',
-   *    environment_config  : 'given_environment_config',
-   *    environment_type    : 'given_environment_type'
-   *  ]
-   * ]
-   * </blockquote></pre>
-   * @param templateConfig LoadTestConfig instance
-   * @param cl Closure with fields setting
-   * @return configuration map
-   * @see LoadTestConfig
-   * @see LoadTestConfig#templateConfig
-   */
-  static Map<String, Serializable> fromTemplate(final LoadTestConfig templateConfig, final Closure cl = {}) {
-    final def newConfig = of(templateConfig)
-    delegateAndInvoke(newConfig, cl)
-    final def properties = newConfig.propertiesMap
-    verifyProperties(properties)
-    return ConfigHelper.convertProperties(properties)
-  }
-
-  /**
-   * Returns configuration map (see {@link LoadTestConfig#fromTemplate}) directly from given settings
-   * @param cl Closure with settings
-   * @return configuration map
-   */
-  static Map<String, Serializable> config(final Closure cl = {}) {
-    final def config = new LoadTestConfig()
-    delegateAndInvoke(config, cl)
-    final def properties = config.propertiesMap
-    verifyProperties(properties)
-    return ConfigHelper.convertProperties(config.propertiesMap)
-  }
-
-  private static void delegateAndInvoke(final delegate, final Closure cl = {}) {
-    final def code = cl.rehydrate(delegate, this, this)
-    code.resolveStrategy = Closure.DELEGATE_ONLY
-    code()
-  }
-
-  private static LoadTestConfig of(final LoadTestConfig oldConfig) {
-    final def newConfig = new LoadTestConfig()
-
-    //primitive values
-    InvokerHelper.setProperties(newConfig, oldConfig.propertiesMap)
-
-    //non-primitive values
-    newConfig._pipelineOptions = oldConfig._pipelineOptions ? PipelineOptions.of(oldConfig._pipelineOptions) : null
-
-    return newConfig
-  }
-
-  @Override
-  Map<String, Serializable> toPrimitiveValues() {
-    final def map = propertiesMap
-    verifyProperties(map)
-    return ConfigHelper.convertProperties(map)
-  }
-
-  LinkedHashMap<String, Object> getPropertiesMap() {
-    return [
-      _title: _title,
-      _test: _test,
-      _runner: _runner,
-      _pipelineOptions: _pipelineOptions
-    ]
-  }
-
-  private static void verifyProperties(final LinkedHashMap<String, Object> map) {
-    for (entry in map.entrySet()) {
-      requireNonNull(entry.value, "Missing ${entry.key.substring(1)} in configuration")
-    }
-  }
-
-  private static class PipelineOptions implements SerializableOption<Map<String, Serializable>> {
-    private Map<String, Object> _specificParameters = new HashMap<>()
-    private boolean _streaming = false
-    private SourceOptions _coSourceOptions
-    private InputOptions _coInputOptions
-    private StepOptions _stepOptions
-
-    //required
-    private String _project
-
-    //java required
-    private String _appName
-    private SourceOptions _sourceOptions
-
-    //python required
-    private String _metricsDataset
-    private String _metricsTable
-    private String _jobName
-    private InputOptions _inputOptions
-
-    //internal usage
-    private SDK i_sdk
-    private Runner i_runner
-    private static final i_required = ["_project"]
-    private static final i_dataflowRequired = [
-      "_numWorkers",
-      "_tempLocation",
-      "_autoscalingAlgorithm",
-      "_region"
-    ]
-    private static final i_portableRequired = [
-      "_jobEndpoint",
-      "_environmentType",
-      "_environmentConfig",
-      "_parallelism"
-    ]
-    private static final i_javaRequired = [
-      "_sourceOptions",
-      "_appName"
-    ]
-    private static final i_pythonRequired = [
-      "_metricsDataset",
-      "_metricsTable",
-      "_inputOptions",
-      "_jobName"
-    ]
-
-    //dataflow required
-    private def  _numWorkers
-    private String  _tempLocation
-    private String  _autoscalingAlgorithm
-    private String _region = 'us-central1'
-
-    //flink required
-    private String _jobEndpoint
-    private String _environmentType
-    private String _environmentConfig
-    private def _parallelism
-
-    void jobName(final String name) { _jobName = name }
-    void appName(final String name) { _appName = name }
-    void project(final String project) { _project = project }
-    void tempLocation(final String location) { _tempLocation = location }
-    void metricsDataset(final String dataset) { _metricsDataset = dataset }
-    void metricsTable(final String table) { _metricsTable = table }
-    void inputOptions(final InputOptions options) { _inputOptions = options }
-    void numWorkers(final int workers) { _numWorkers = workers }
-    void autoscalingAlgorithm(final String algorithm) { _autoscalingAlgorithm = algorithm }
-    void region(final String region) { _region = region }
-    void jobEndpoint(final String endpoint) { _jobEndpoint = endpoint }
-    void environmentType(final String type) { _environmentType = type }
-    void environmentConfig(final String config) { _environmentConfig = config }
-    void parallelism(final int parallelism) { _parallelism = parallelism }
-    void streaming(final boolean isStreaming) { _streaming = isStreaming }
-    void sourceOptions(final Closure cl = {}) { _sourceOptions = makeSourceOptions(cl) }
-    void coSourceOptions(final Closure cl = {}) { _coSourceOptions = makeSourceOptions(cl) }
-    void inputOptions(final Closure cl = {}) { _inputOptions = makeInputOptions(cl) }
-    void coInputOptions(final Closure cl = {}) { _coInputOptions = makeInputOptions(cl) }
-    void stepOptions(final Closure cl = {}) { _stepOptions = makeStepOptions(cl) }
-    void specificParameters(final Map<String, Object> map) { _specificParameters.putAll(map) }
-
-    //sdk -- snake_case vs camelCase
-    void python() { i_sdk = SDK.PYTHON }
-    void java() { i_sdk = SDK.JAVA }
-
-
-    private InputOptions makeInputOptions(final Closure cl = {}) {
-      return makeOptions(cl, _inputOptions ?: InputOptions.withSDK(i_sdk))
-    }
-
-    private SourceOptions makeSourceOptions(final Closure cl = {}) {
-      return makeOptions(cl, _sourceOptions ?: SourceOptions.withSDK(i_sdk))
-    }
-
-    private StepOptions makeStepOptions(final Closure cl = {}) {
-      return makeOptions(cl, _stepOptions ?: StepOptions.withSDK(i_sdk))
-    }
-
-    private <T> T makeOptions(final Closure cl = {}, final T options) {
-      final def code = cl.rehydrate(options, this, this)
-      code.resolveStrategy = Closure.DELEGATE_ONLY
-      code()
-      return options
-    }
-
-    @Override
-    Map<String, Serializable> toPrimitiveValues() {
-      final def map = propertiesMap
-      verifyPipelineProperties(map)
-      return ConfigHelper.convertProperties(map, i_sdk)
-    }
-
-    private void verifyPipelineProperties(final Map<String, Object> map) {
-      verifyRequired(map)
-      switch (i_runner) {
-        case Runner.DATAFLOW:
-          verifyDataflowProperties(map)
-          break
-        case Runner.PORTABLE:
-          verifyPortableProperties(map)
-          break
-        default:
-          break
-      }
-    }
-
-    private void verifyRequired(final Map<String, Object> map) {
-      verifyCommonRequired(map)
-      switch (i_sdk) {
-        case SDK.PYTHON:
-          verifyPythonRequired(map)
-          break
-        case SDK.JAVA:
-          verifyJavaRequired(map)
-          break
-        default:
-          break
-      }
-    }
-
-    private static void verifyCommonRequired(final Map<String, Object> map) {
-      verify(map, "") { i_required.contains(it.key) }
-    }
-
-    private static void verifyPythonRequired(final Map<String, Object> map) {
-      verify(map, "for Python SDK") { i_pythonRequired.contains(it.key) }
-    }
-
-    private static void verifyJavaRequired(final Map<String, Object> map) {
-      verify(map, "for Java SDK") { i_javaRequired.contains(it.key) }
-    }
-
-    private static void verifyDataflowProperties(final Map<String, Object> map) {
-      verify(map, "for Dataflow runner") { i_dataflowRequired.contains(it.key) }
-    }
-
-    private static void verifyPortableProperties(final Map<String, Object> map) {
-      verify(map, "for Portable runner") { i_portableRequired.contains(it.key) }
-    }
-
-    private static void verify(final Map<String, Object> map, final String message, final Predicate<Map.Entry<String, Object>> predicate) {
-      map.entrySet()
-          .stream()
-          .filter(predicate)
-          .forEach{ requireNonNull(it.value, "${it.key.substring(1)} is required " + message) }
-    }
-
-    static PipelineOptions of(final PipelineOptions options) {
-      final def newOptions = new PipelineOptions()
-
-      //primitive values
-      InvokerHelper.setProperties(newOptions, options.propertiesMap)
-
-      //non-primitive
-      newOptions._inputOptions = options._inputOptions ? InputOptions.of(options._inputOptions) : null
-      newOptions._coInputOptions = options._coInputOptions ? InputOptions.of(options._coInputOptions) : null
-      newOptions._sourceOptions = options._sourceOptions ? SourceOptions.of(options._sourceOptions) : null
-      newOptions._coSourceOptions = options._coSourceOptions ? SourceOptions.of(options._coSourceOptions) : null
-      newOptions._stepOptions = options._stepOptions ? StepOptions.of(options._stepOptions) : null
-      newOptions._specificParameters = new HashMap<>(options._specificParameters)
-
-      return newOptions
-    }
-
-    Map<String, Object> getPropertiesMap() {
-      return [
-        i_sdk: i_sdk,
-        i_runner: i_runner,
-        _jobName: _jobName,
-        _appName: _appName,
-        _project: _project,
-        _tempLocation: _tempLocation,
-        _metricsDataset: _metricsDataset,
-        _metricsTable: _metricsTable,
-        _numWorkers: _numWorkers,
-        _autoscalingAlgorithm: _autoscalingAlgorithm,
-        _region: _region,
-        _inputOptions: _inputOptions,
-        _coInputOptions: _coInputOptions,
-        _jobEndpoint: _jobEndpoint,
-        _environmentType: _environmentType,
-        _environmentConfig: _environmentConfig,
-        _parallelism: _parallelism,
-        _streaming: _streaming,
-        _sourceOptions: _sourceOptions,
-        _coSourceOptions: _coSourceOptions,
-        _stepOptions: _stepOptions
-      ].putAll(_specificParameters.entrySet())
-    }
-
-    private static class InputOptions implements SerializableOption<String> {
-      private def _numRecords
-      private def _keySize
-      private def _valueSize
-      private def _numHotKeys
-      private def _hotKeyFraction
-
-      //internal usage
-      private SDK i_sdk
-
-      private InputOptions() {}
-
-      static withSDK(final SDK sdk) {
-        final def input = new InputOptions()
-        input.i_sdk = sdk
-        return input
-      }
-
-      void numRecords(final int num) { _numRecords = num }
-      void keySize(final int size) { _keySize = size }
-      void valueSize(final int size) { _valueSize = size }
-      void numHotsKeys(final int num) { _numHotKeys = num }
-      void hotKeyFraction(final int fraction) { _hotKeyFraction = fraction }
-
-      @Override
-      String toPrimitiveValues() {
-        return "'${new 
JsonBuilder(ConfigHelper.convertProperties(propertiesMap, i_sdk)).toString()}'"
-      }
-
-      static InputOptions of(final InputOptions oldOptions) {
-        final def newOptions = new InputOptions()
-        InvokerHelper.setProperties(newOptions, oldOptions.propertiesMap)
-        return newOptions
-      }
-
-      LinkedHashMap<String, Object> getPropertiesMap() {
-        return [
-          i_sdk: i_sdk,
-          _numRecords: _numRecords,
-          _keySize: _keySize,
-          _valueSize: _valueSize,
-          _numHotKeys: _numHotKeys,
-          _hotKeyFraction: _hotKeyFraction
-        ] as LinkedHashMap<String, Object>
-      }
-    }
-
-    private static class SourceOptions implements SerializableOption<String> {
-      private def _numRecords
-      private def _keySizeBytes
-      private def _valueSizeBytes
-      private def _numHotKeys
-      private def _hotKeyFraction
-      private def _splitPointFrequencyRecords
-
-      //internal usage
-      private SDK i_sdk
-
-      private SourceOptions() {}
-
-      static withSDK(final SDK sdk) {
-        final def input = new SourceOptions()
-        input.i_sdk = sdk
-        return input
-      }
-
-      void numRecords(final int num) { _numRecords = num }
-      void keySizeBytes(final int size) { _keySizeBytes = size }
-      void valueSizeBytes(final int size) { _valueSizeBytes = size }
-      void numHotsKeys(final int num) { _numHotKeys = num }
-      void hotKeyFraction(final int fraction) { _hotKeyFraction = fraction }
-      void splitPointFrequencyRecords(final int splitPoint) { _splitPointFrequencyRecords = splitPoint }
-
-      @Override
-      String toPrimitiveValues() {
-        return new JsonBuilder(ConfigHelper.convertProperties(propertiesMap, i_sdk)).toString()
-      }
-
-      static SourceOptions of(final SourceOptions oldOptions) {
-        final def newOptions = new SourceOptions()
-        InvokerHelper.setProperties(newOptions, oldOptions.propertiesMap)
-        return newOptions
-      }
-
-      Map<String, Object> getPropertiesMap() {
-        return [
-          i_sdk: i_sdk,
-          _numRecords: _numRecords,
-          _keySizeBytes: _keySizeBytes,
-          _valueSizeBytes: _valueSizeBytes,
-          _numHotKeys: _numHotKeys,
-          _hotKeyFraction: _hotKeyFraction,
-          _splitPointFrequencyRecords: _splitPointFrequencyRecords
-        ]
-      }
-    }
-
-    private static class StepOptions implements SerializableOption<String> {
-      private def _outputRecordsPerInputRecord
-      private boolean  _preservesInputKeyDistribution
-
-      //internal usage
-      private SDK i_sdk
-
-      private StepOptions() {}
-
-      static withSDK(final SDK sdk) {
-        final def option = new StepOptions()
-        option.i_sdk = sdk
-        return option
-      }
-
-      void outputRecordsPerInputRecord(final int records) { _outputRecordsPerInputRecord = records }
-      void preservesInputKeyDistribution(final boolean  shouldPreserve) { _preservesInputKeyDistribution = shouldPreserve }
-
-      @Override
-      String toPrimitiveValues() {
-        return new JsonBuilder(ConfigHelper.convertProperties(propertiesMap, i_sdk)).toString()
-      }
-
-      Map<String, Object> getPropertiesMap() {
-        return [
-          i_sdk: i_sdk,
-          _outputRecordsPerInputRecord: _outputRecordsPerInputRecord,
-          _preservesInputKeyDistribution: _preservesInputKeyDistribution
-        ] as Map<String, Object>
-      }
-
-      static StepOptions of(final StepOptions oldOption) {
-        final def newOption = new StepOptions()
-        InvokerHelper.setProperties(newOption, oldOption.propertiesMap)
-        return newOption
-      }
-    }
-  }
-
-  private interface SerializableOption<T> {
-    T toPrimitiveValues()
-  }
-
-  private static class ConfigHelper {
-    private static final List<String> FIELDS_TO_REMOVE = ["class", "i_sdk", "i_runner"]
-
-    static Map<String, Serializable> convertProperties(final Map<String, Object> propertyMap, final SDK sdk = SDK.JAVA) {
-      return propertyMap
-          .findAll { nonNull(it.value) }
-          .findAll { !FIELDS_TO_REMOVE.contains(it.key) }
-          .collectEntries { key, value ->
-            [
-              modifyKey(key, sdk),
-              toPrimitive(value)
-            ]
-          } as Map<String, Serializable>
-    }
-
-    private static String modifyKey(final String key, final SDK sdk) {
-      final def result = key.startsWith('_') ? key.substring(1) : key
-      switch (sdk) {
-        case SDK.PYTHON:
-          return toSnakeCase(result)
-        case SDK.JAVA:
-          return toCamelCase(result)
-        default:
-          throw new IllegalArgumentException("SDK not specified")
-      }
-    }
-
-    private static String toSnakeCase(final String text) {
-      return text.replaceAll(/([A-Z])/, /_$1/).toLowerCase().replaceAll(/^_/, '')
-    }
-
-    private static String toCamelCase(final String text) {
-      return text.replaceAll( "(_)([A-Za-z0-9])", { Object[] it -> ((String) 
it[2]).toUpperCase() })
-    }
-
-    private static def toPrimitive(value) {
-      return value instanceof SerializableOption
-          ? value.toPrimitiveValues()
-          : value
-    }
-  }
-}
diff --git a/.test-infra/jenkins/LoadTestsBuilder.groovy b/.test-infra/jenkins/LoadTestsBuilder.groovy
deleted file mode 100644
index 060a2ea6542..00000000000
--- a/.test-infra/jenkins/LoadTestsBuilder.groovy
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import CommonJobProperties as commonJobProperties
-import CommonTestProperties.Runner
-import CommonTestProperties.SDK
-import CommonTestProperties.TriggeringContext
-import InfluxDBCredentialsHelper
-import static PythonTestProperties.LOAD_TEST_PYTHON_VERSION
-
-class LoadTestsBuilder {
-  final static String DOCKER_CONTAINER_REGISTRY = 'gcr.io/apache-beam-testing/beam-sdk'
-  final static String GO_SDK_CONTAINER = "${DOCKER_CONTAINER_REGISTRY}/beam_go_sdk:latest"
-  final static String DOCKER_BEAM_SDK_IMAGE = "beam_python${LOAD_TEST_PYTHON_VERSION}_sdk:latest"
-  final static String DOCKER_BEAM_JOBSERVER = 'gcr.io/apache-beam-testing/beam_portability'
-
-  static void loadTests(scope, CommonTestProperties.SDK sdk, List testConfigurations, String test, String mode,
-      List<String> jobSpecificSwitches = null) {
-    scope.description("Runs ${sdk.toString().toLowerCase().capitalize()} ${test} load tests in ${mode} mode")
-
-    commonJobProperties.setTopLevelMainJobProperties(scope, 'master', 720)
-
-    for (testConfiguration in testConfigurations) {
-      loadTest(scope, testConfiguration.title, testConfiguration.runner, sdk, testConfiguration.pipelineOptions,
-          testConfiguration.test, jobSpecificSwitches)
-    }
-  }
-
-
-  static void loadTest(context, String title, Runner runner, SDK sdk, Map<String, ?> options,
-      String mainClass, List<String> jobSpecificSwitches = null, String requirementsTxtFile = null,
-      String pythonVersion = null) {
-    options.put('runner', runner.option)
-    InfluxDBCredentialsHelper.useCredentials(context)
-
-    context.steps {
-      shell("echo \"*** ${title} ***\"")
-      gradle {
-        rootBuildScriptDir(commonJobProperties.checkoutDir)
-        setGradleTask(delegate, runner, sdk, options, mainClass,
-            jobSpecificSwitches, requirementsTxtFile, pythonVersion)
-        commonJobProperties.setGradleSwitches(delegate)
-      }
-    }
-  }
-
-  static String parseOptions(Map<String, ?> options) {
-    options.collect { entry ->
-
-      if (entry.key.matches(".*\\s.*")) {
-        throw new IllegalArgumentException("""
-          Encountered invalid option name '${entry.key}'. Names must not
-          contain whitespace.
-          """)
-      }
-
-      // Flags are indicated by null values
-      if (entry.value == null) {
-        "--${entry.key}"
-      } else if (entry.value.toString().matches(".*\\s.*") &&
-      !entry.value.toString().matches("'[^']*'")) {
-        throw new IllegalArgumentException("""
-          Option '${entry.key}' has an invalid value, '${entry.value}'. Values
-          must not contain whitespace, or they must be wrapped in single quotes.
-          """)
-      } else {
-        "--${entry.key}=$entry.value".replace('\"', '\\\"').replace('\'', 
'\\\'')
-      }
-    }.join(' ')
-  }
-
-  static String getBigQueryDataset(String baseName, TriggeringContext triggeringContext) {
-    if (triggeringContext == TriggeringContext.PR) {
-      return baseName + '_PRs'
-    } else {
-      return baseName
-    }
-  }
-
-  private static void setGradleTask(context, Runner runner, SDK sdk, Map<String, ?> options,
-      String mainClass, List<String> jobSpecificSwitches, String requirementsTxtFile = null,
-      String pythonVersion = null) {
-    context.tasks(getGradleTaskName(sdk))
-    context.switches("-PloadTest.mainClass=\"${mainClass}\"")
-    context.switches("-Prunner=${runner.getDependencyBySDK(sdk)}")
-    context.switches("-PloadTest.args=\"${parseOptions(options)}\"")
-    if (requirementsTxtFile != null) {
-      context.switches("-PloadTest.requirementsTxtFile=\"${requirementsTxtFile}\"")
-    }
-    if (jobSpecificSwitches != null) {
-      jobSpecificSwitches.each {
-        context.switches(it)
-      }
-    }
-
-    if (sdk == SDK.PYTHON) {
-      if (pythonVersion == null) {
-        context.switches("-PpythonVersion=${LOAD_TEST_PYTHON_VERSION}")
-      }
-      else {
-        context.switches("-PpythonVersion=${pythonVersion}")
-      }
-    }
-  }
-
-  private static String getGradleTaskName(SDK sdk) {
-    switch (sdk) {
-      case SDK.JAVA:
-        return ':sdks:java:testing:load-tests:run'
-      case SDK.PYTHON:
-        return ':sdks:python:apache_beam:testing:load_tests:run'
-      case SDK.GO:
-        return ':sdks:go:test:load:run'
-      default:
-        throw new RuntimeException("No task name defined for SDK: $sdk")
-    }
-  }
-}
-
-
-
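
For readers tracing history, a Job DSL script drove LoadTestsBuilder roughly as follows. This is a minimal sketch, not a record of a real job file: the job name, trigger phrase, test class, and pipeline options are illustrative, and it assumes the PhraseTriggeringPostCommitBuilder and CommonTestProperties classes removed elsewhere in this commit.

    def loadTestConfigurations = [
      [
        title          : 'Load test: GBK 2GB of 10B records',                 // echoed before the run
        test           : 'org.apache.beam.sdk.loadtests.GroupByKeyLoadTest',  // hypothetical main class
        runner         : CommonTestProperties.Runner.DATAFLOW,
        pipelineOptions: [
          project  : 'apache-beam-testing',
          fanout   : 1,      // rendered as "--fanout=1" by parseOptions
          streaming: null,   // a null value marks a bare flag: "--streaming"
        ]
      ]
    ]

    PhraseTriggeringPostCommitBuilder.postCommitJob(
        'beam_LoadTests_Java_GBK_Example',   // hypothetical job name
        'Run Load Tests Java GBK Example',   // hypothetical trigger phrase
        'Load Tests Java GBK Example', this) {
          LoadTestsBuilder.loadTests(delegate, CommonTestProperties.SDK.JAVA,
              loadTestConfigurations, 'GBK', 'batch')
        }
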
diff --git a/.test-infra/jenkins/NexmarkBuilder.groovy b/.test-infra/jenkins/NexmarkBuilder.groovy
deleted file mode 100644
index 69fa3dcc427..00000000000
--- a/.test-infra/jenkins/NexmarkBuilder.groovy
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import CommonJobProperties as commonJobProperties
-import CommonTestProperties.Runner
-import CommonTestProperties.SDK
-import CommonTestProperties.TriggeringContext
-import InfluxDBCredentialsHelper
-import NexmarkDatabaseProperties
-
-// Class for building NEXMark jobs and suites.
-class NexmarkBuilder {
-  final static String DEFAULT_JAVA_RUNTIME_VERSION = "1.8";
-  final static String JAVA_11_RUNTIME_VERSION = "11";
-  final static String JAVA_17_RUNTIME_VERSION = "17";
-
-  private static Map<String, Object> defaultOptions = [
-    'manageResources': false,
-    'monitorJobs'    : true,
-  ] << NexmarkDatabaseProperties.nexmarkBigQueryArgs << NexmarkDatabaseProperties.nexmarkInfluxDBArgs
-
-  static void standardJob(context, Runner runner, SDK sdk, Map<String, Object> jobSpecificOptions, TriggeringContext triggeringContext) {
-    standardJob(context, runner, sdk, jobSpecificOptions, triggeringContext, null, DEFAULT_JAVA_RUNTIME_VERSION);
-  }
-
-  static void standardJob(context, Runner runner, SDK sdk, Map<String, Object> jobSpecificOptions, TriggeringContext triggeringContext, List<String> jobSpecificSwitches, String javaRuntimeVersion) {
-    Map<String, Object> options = getFullOptions(jobSpecificOptions, runner, triggeringContext)
-
-    options.put('streaming', false)
-    suite(context, "NEXMARK IN BATCH MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-
-    options.put('streaming', true)
-    suite(context, "NEXMARK IN STREAMING MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-
-    options.put('queryLanguage', 'sql')
-
-    options.put('streaming', false)
-    suite(context, "NEXMARK IN SQL BATCH MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-
-    options.put('streaming', true)
-    suite(context, "NEXMARK IN SQL STREAMING MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-
-    options.put('queryLanguage', 'zetasql')
-
-    options.put('streaming', false)
-    suite(context, "NEXMARK IN ZETASQL BATCH MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-
-    options.put('streaming', true)
-    suite(context, "NEXMARK IN ZETASQL STREAMING MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-  }
-
-  static void nonQueryLanguageJobs(context, Runner runner, SDK sdk, Map<String, Object> jobSpecificOptions, TriggeringContext triggeringContext, List<String> jobSpecificSwitches, String javaRuntimeVersion) {
-    Map<String, Object> options = getFullOptions(jobSpecificOptions, runner, triggeringContext)
-
-    options.put('streaming', false)
-    suite(context, "NEXMARK IN BATCH MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-
-    options.put('streaming', true)
-    suite(context, "NEXMARK IN STREAMING MODE USING ${runner} RUNNER", runner, sdk, options, jobSpecificSwitches, javaRuntimeVersion)
-  }
-
-  static void batchOnlyJob(context, Runner runner, SDK sdk, Map<String, Object> jobSpecificOptions, TriggeringContext triggeringContext) {
-    Map<String, Object> options = getFullOptions(jobSpecificOptions, runner, triggeringContext)
-
-    options.put('streaming', false)
-    suite(context, "NEXMARK IN BATCH MODE USING ${runner} RUNNER", runner, sdk, options, null, DEFAULT_JAVA_RUNTIME_VERSION)
-
-    options.put('queryLanguage', 'sql')
-    suite(context, "NEXMARK IN SQL BATCH MODE USING ${runner} RUNNER", runner, sdk, options, null, DEFAULT_JAVA_RUNTIME_VERSION)
-
-    options.put('queryLanguage', 'zetasql')
-    suite(context, "NEXMARK IN ZETASQL BATCH MODE USING ${runner} RUNNER", runner, sdk, options, null, DEFAULT_JAVA_RUNTIME_VERSION)
-  }
-
-  static void standardPythonJob(context, Runner runner, SDK sdk, Map<String, Object> jobSpecificOptions, TriggeringContext triggeringContext) {
-    Map<String, Object> options = getFullOptions(jobSpecificOptions, runner, triggeringContext)
-
-    pythonSuite(context, "NEXMARK PYTHON IN BATCH MODE USING ${runner} RUNNER", runner, sdk, options)
-  }
-
-
-  private
-  static Map<String, Object> getFullOptions(Map<String, Object> jobSpecificOptions, Runner runner, TriggeringContext triggeringContext) {
-    Map<String, Object> options = defaultOptions + jobSpecificOptions
-
-    options.put('runner', runner.option)
-    options.put('bigQueryDataset', determineStorageName(triggeringContext))
-    options.put('baseInfluxMeasurement', determineStorageName(triggeringContext))
-    options
-  }
-
-
-  static void suite(context, String title, Runner runner, SDK sdk, Map<String, Object> options, List<String> jobSpecificSwitches, String javaRuntimeVersion) {
-
-    if (javaRuntimeVersion == JAVA_11_RUNTIME_VERSION) {
-      java11Suite(context, title, runner, sdk, options, jobSpecificSwitches)
-    } else if (javaRuntimeVersion == JAVA_17_RUNTIME_VERSION) {
-      java17Suite(context, title, runner, sdk, options, jobSpecificSwitches)
-    } else if (javaRuntimeVersion == DEFAULT_JAVA_RUNTIME_VERSION) {
-      java8Suite(context, title, runner, sdk, options, jobSpecificSwitches)
-    }
-  }
-
-  static void java8Suite(context, String title, Runner runner, SDK sdk, Map<String, Object> options, List<String> jobSpecificSwitches) {
-    InfluxDBCredentialsHelper.useCredentials(context)
-    context.steps {
-      shell("echo \"*** RUN ${title} with Java 8 ***\"")
-      gradle {
-        rootBuildScriptDir(commonJobProperties.checkoutDir)
-        tasks(':sdks:java:testing:nexmark:run')
-        commonJobProperties.setGradleSwitches(delegate)
-        switches("-Pnexmark.runner=${runner.getDependencyBySDK(sdk)}")
-        switches("-Pnexmark.args=\"${parseOptions(options)}\"")
-        if (jobSpecificSwitches != null) {
-          jobSpecificSwitches.each {
-            switches(it)
-          }
-        }
-      }
-    }
-  }
-
-  static void java11Suite(context, String title, Runner runner, SDK sdk, Map<String, Object> options, List<String> jobSpecificSwitches) {
-    InfluxDBCredentialsHelper.useCredentials(context)
-    context.steps {
-      shell("echo \"*** RUN ${title} with Java 11***\"")
-
-      // Run with Java 11
-      gradle {
-        rootBuildScriptDir(commonJobProperties.checkoutDir)
-        tasks(':sdks:java:testing:nexmark:run')
-        commonJobProperties.setGradleSwitches(delegate)
-        switches("-PtestJavaVersion=11")
-        switches("-Pjava11Home=${commonJobProperties.JAVA_11_HOME}")
-        switches("-Pnexmark.runner=${runner.getDependencyBySDK(sdk)}")
-        switches("-Pnexmark.args=\"${parseOptions(options)}\"")
-        if (jobSpecificSwitches != null) {
-          jobSpecificSwitches.each {
-            switches(it)
-          }
-        }
-      }
-    }
-  }
-
-  static void java17Suite(context, String title, Runner runner, SDK sdk, Map<String, Object> options, List<String> jobSpecificSwitches) {
-    InfluxDBCredentialsHelper.useCredentials(context)
-    context.steps {
-      shell("echo \"*** RUN ${title} with Java 17***\"")
-
-      // Run with Java 17
-      gradle {
-        rootBuildScriptDir(commonJobProperties.checkoutDir)
-        tasks(':sdks:java:testing:nexmark:run')
-        commonJobProperties.setGradleSwitches(delegate)
-        switches("-PtestJavaVersion=17")
-        switches("-Pjava17Home=${commonJobProperties.JAVA_17_HOME}")
-        switches("-Pnexmark.runner=${runner.getDependencyBySDK(sdk)}")
-        switches("-Pnexmark.args=\"${parseOptions(options)}\"")
-        if (jobSpecificSwitches != null) {
-          jobSpecificSwitches.each {
-            switches(it)
-          }
-        }
-      }
-    }
-  }
-
-  static void pythonSuite(context, String title, Runner runner, SDK sdk, Map<String, Object> options) {
-    InfluxDBCredentialsHelper.useCredentials(context)
-
-    for (int i = 0; i <= 12; i++) {
-      if (
-      // https://github.com/apache/beam/issues/24678
-      i == 1 ||
-      // https://github.com/apache/beam/issues/24679
-      i == 4 || i == 6 || i == 9 ||
-      // https://github.com/apache/beam/issues/24680
-      i == 12) {
-        continue
-      }
-      pythonTest(context, title, i, runner, sdk, options)
-    }
-  }
-
-  static void pythonTest(context, String title, int query, Runner runner, SDK sdk, Map<String, Object> options) {
-    context.steps {
-      shell("echo \"*** GENERATE events for ${title} query ${query} with 
Python***\"")
-
-      options.put('query', query)
-
-      // Matches defaults in NexmarkSuite.java
-      if (query == 4 || query == 6 || query == 9) {
-        options.put('numEvents', 10000)
-      } else {
-        options.put('numEvents', 100000)
-      }
-
-      String eventFile = options.get('tempLocation') + "/eventFiles/\${BUILD_TAG}/query${query}-"
-      options.remove('input')
-      options.put('generateEventFilePathPrefix', eventFile)
-
-      gradle {
-        rootBuildScriptDir(commonJobProperties.checkoutDir)
-        tasks(':sdks:java:testing:nexmark:run')
-        commonJobProperties.setGradleSwitches(delegate)
-        switches("-Pnexmark.runner=:runners:direct-java")
-        switches("-Pnexmark.args=\"${parseOptions(options)}\"")
-      }
-
-      shell("echo \"*** RUN ${title} query ${query} with Python***\"")
-
-      options.remove('generateEventFilePathPrefix')
-      options.put('input', eventFile + "\\*")
-
-      gradle {
-        rootBuildScriptDir(commonJobProperties.checkoutDir)
-        tasks(':sdks:python:apache_beam:testing:benchmarks:nexmark:run')
-        commonJobProperties.setGradleSwitches(delegate)
-        switches("-Pnexmark.args=\"${parseOptionsPython(options)}\"")
-      }
-    }
-  }
-
-  private static String parseOptions(Map<String, Object> options) {
-    options.collect { "--${it.key}=${it.value.toString()}" }.join(' ')
-  }
-
-  private static String parseOptionsPython(Map<String, Object> options) {
-    options.collect {
-      String key = it.key.toString().replaceAll("([a-z])([A-Z]+)", "\$1_\$2").toLowerCase()
-      if (it.value == false) {
-        return ""
-      }
-      if (it.value == true) {
-        return "--${key}"
-      }
-      return "--${key}=${it.value}"
-    }.join(' ')
-  }
-
-  private static String determineStorageName(TriggeringContext triggeringContext) {
-    triggeringContext == TriggeringContext.PR ? "nexmark_PRs" : "nexmark"
-  }
-}
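
NexmarkBuilder was consumed the same way; a minimal sketch, assuming the Runner.DIRECT and TriggeringContext.POST_COMMIT constants from the CommonTestProperties file removed earlier in this commit (the job name, phrase, and the 'suite' option are illustrative):

    PhraseTriggeringPostCommitBuilder.postCommitJob(
        'beam_PostCommit_Java_Nexmark_Direct_Example',   // hypothetical name
        'Run Example Direct Runner Nexmark Tests',       // hypothetical phrase
        'Example Direct Runner Nexmark Tests', this) {
          // standardJob merges the given options over defaultOptions and runs
          // the batch/streaming suites for plain, SQL, and ZetaSQL queries.
          NexmarkBuilder.standardJob(delegate,
              CommonTestProperties.Runner.DIRECT, CommonTestProperties.SDK.JAVA,
              ['suite': 'SMOKE'], CommonTestProperties.TriggeringContext.POST_COMMIT)
        }
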
diff --git a/.test-infra/jenkins/NexmarkDatabaseProperties.groovy b/.test-infra/jenkins/NexmarkDatabaseProperties.groovy
deleted file mode 100644
index 8a2d713abba..00000000000
--- a/.test-infra/jenkins/NexmarkDatabaseProperties.groovy
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import InfluxDBCredentialsHelper
-
-// Contains BigQuery- and InfluxDB-related properties for Nexmark runs.
-class NexmarkDatabaseProperties {
-
-  static Map<String, Object> nexmarkBigQueryArgs = [
-    'bigQueryTable'          : 'nexmark',
-    'bigQueryDataset'        : 'nexmark',
-    'project'                : 'apache-beam-testing',
-    'resourceNameMode'       : 'QUERY_RUNNER_AND_MODE',
-    'exportSummaryToBigQuery': true,
-    'tempLocation'           : 'gs://temp-storage-for-perf-tests/nexmark',
-  ]
-
-  static Map<String, Object> nexmarkInfluxDBArgs = [
-    'influxDatabase'         : InfluxDBCredentialsHelper.InfluxDBDatabaseName,
-    'influxHost'             : InfluxDBCredentialsHelper.InfluxDBHostUrl,
-    'baseInfluxMeasurement'  : 'nexmark',
-    'exportSummaryToInfluxDB': true,
-    'influxRetentionPolicy'  : 'forever',
-  ]
-}
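
The << operator that NexmarkBuilder applies to these maps (see its defaultOptions above) is Groovy's Map.leftShift: it copies the right-hand entries into the left-hand map, with later maps winning on key collisions. A quick illustration with hypothetical keys:

    def merged = ['monitorJobs': true] <<
        ['bigQueryDataset': 'nexmark', 'exportSummaryToBigQuery': true] <<
        ['bigQueryDataset': 'nexmark_PRs']
    assert merged.bigQueryDataset == 'nexmark_PRs'   // later entry wins
    assert merged.monitorJobs                        // earlier entries are kept
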
diff --git a/.test-infra/jenkins/NoPhraseTriggeringPostCommitBuilder.groovy b/.test-infra/jenkins/NoPhraseTriggeringPostCommitBuilder.groovy
deleted file mode 100644
index 33b06ba39fe..00000000000
--- a/.test-infra/jenkins/NoPhraseTriggeringPostCommitBuilder.groovy
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import PostcommitJobBuilder
-/**
- * This class is an extension of PostcommitJobBuilder that disables GitHub phrase triggering.
- */
-class NoPhraseTriggeringPostCommitBuilder extends PostcommitJobBuilder {
-  static void postCommitJob(nameBase,
-      githubUiHint,
-      scope,
-      jobDefinition = {}) {
-    PostcommitJobBuilder jb = new PostcommitJobBuilder(scope, jobDefinition)
-    jb.defineAutoPostCommitJob(nameBase)
-  }
-}
diff --git a/.test-infra/jenkins/PhraseTriggeringPostCommitBuilder.groovy b/.test-infra/jenkins/PhraseTriggeringPostCommitBuilder.groovy
deleted file mode 100644
index bccb6b6c904..00000000000
--- a/.test-infra/jenkins/PhraseTriggeringPostCommitBuilder.groovy
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * This class is to be used for defining postcommit jobs that are phrase-triggered only.
- *
- * The purpose of this class is to define common strategies and reporting/building parameters
- * for pre- and post-commit test jobs and unify them across the project.
- */
-class PhraseTriggeringPostCommitBuilder extends PostcommitJobBuilder {
-  static void postCommitJob(nameBase,
-      triggerPhrase,
-      githubUiHint,
-      scope,
-      jobDefinition = {}) {
-    new PostcommitJobBuilder(scope, jobDefinition).defineGhprbTriggeredJob(
-        nameBase + "_PR", triggerPhrase, githubUiHint, false)
-  }
-}
diff --git a/.test-infra/jenkins/PostcommitJobBuilder.groovy b/.test-infra/jenkins/PostcommitJobBuilder.groovy
deleted file mode 100644
index 26d03352895..00000000000
--- a/.test-infra/jenkins/PostcommitJobBuilder.groovy
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import CommonJobProperties as commonJobProperties
-
-/**
- * This class is to be used for defining jobs for post- and pre-commit tests.
- *
- * The purpose of this class is to define common strategies and reporting/building parameters
- * for pre- and post-commit test jobs and unify them across the project.
- */
-class PostcommitJobBuilder {
-  private def scope
-  private def jobDefinition
-  private def job
-
-  PostcommitJobBuilder(scope, jobDefinition = {}) {
-    this.scope = scope
-    this.jobDefinition = jobDefinition
-    this.job = null
-  }
-
-  /**
-   * Set the job details.
-   *
-   * @param nameBase Job name for the postcommit job; a _PR suffix is added if the trigger is set.
-   * @param triggerPhrase Phrase to trigger jobs, empty to not have a trigger.
-   * @param githubUiHint Short description in the github UI.
-   * @param scope Delegate for the job.
-   *        scope is expected to have the job property (set by Jenkins).
-   *        scope can have the following optional property:
-   *        - buildSchedule: the build schedule of the job. The default is every 6h ('H H/6 * * *')
-   * @param jobDefinition Closure for the job.
-   */
-  static void postCommitJob(nameBase,
-      triggerPhrase,
-      githubUiHint,
-      scope,
-      jobDefinition = {}) {
-    PostcommitJobBuilder jb = new PostcommitJobBuilder(scope, jobDefinition)
-    jb.defineAutoPostCommitJob(nameBase)
-    if (triggerPhrase) {
-      jb.defineGhprbTriggeredJob(nameBase + "_PR", triggerPhrase, githubUiHint, false)
-    }
-  }
-
-  void defineAutoPostCommitJob(name) {
-    // default build schedule
-    String buildSchedule = 'H H/6 * * *'
-    try {
-      buildSchedule = scope.getProperty('buildSchedule')
-    } catch (MissingPropertyException ignored) {
-      // do nothing
-    }
-    def autoBuilds = scope.job(name) {
-      commonJobProperties.setAutoJob delegate, buildSchedule, '[email protected]', true
-    }
-
-    autoBuilds.with(jobDefinition)
-  }
-
-  private void defineGhprbTriggeredJob(name, triggerPhrase, githubUiHint, triggerOnPrCommit) {
-    def ghprbBuilds = scope.job(name) {
-
-      // Execute concurrent builds if necessary.
-      concurrentBuild()
-      throttleConcurrentBuilds {
-        maxTotal(3)
-      }
-
-      commonJobProperties.setPullRequestBuildTrigger(
-          delegate,
-          githubUiHint,
-          triggerPhrase,
-          !triggerOnPrCommit)
-    }
-    ghprbBuilds.with(jobDefinition)
-  }
-}
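
A minimal sketch of a job definition built on this class; the job name, trigger phrase, and Gradle task are hypothetical:

    PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Example',
        'Run Java Example PostCommit',   // PR comment that re-runs the suite
        'Java Example PostCommit Tests', this) {
          description('Example post-commit suite (illustrative only).')
          steps {
            gradle {
              rootBuildScriptDir(commonJobProperties.checkoutDir)
              tasks(':examples:java:postCommit')   // hypothetical Gradle task
              commonJobProperties.setGradleSwitches(delegate)
            }
          }
        }
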
diff --git a/.test-infra/jenkins/PrecommitJobBuilder.groovy b/.test-infra/jenkins/PrecommitJobBuilder.groovy
deleted file mode 100644
index d73c965fa13..00000000000
--- a/.test-infra/jenkins/PrecommitJobBuilder.groovy
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import CommonJobProperties as commonJobProperties
-
-/** This class defines the PrecommitJobBuilder.build() helper for defining pre-commit jobs. */
-class PrecommitJobBuilder {
-  /** scope 'this' parameter from top-level script; used for binding Job DSL methods. */
-  Object scope
-
-  /** Base name for each pre-commit suite job, e.g. 'Go'. */
-  String nameBase
-
-  /**  DEPRECATED: The Gradle task to execute. */
-  String gradleTask = null
-
-  /**  The Gradle tasks to execute. */
-  List<String> gradleTasks = []
-
-  /** If defined, set of additional switches to pass to Gradle. */
-  List<String> gradleSwitches = []
-
-  /** Overall job timeout. */
-  int timeoutMins = 120
-
-  /** If defined, set of path expressions used to trigger the job on commit. */
-  List<String> triggerPathPatterns = []
-
-  /** If defined, set of path expressions to not trigger the job on commit. */
-  List<String> excludePathPatterns = []
-
-  /** Whether to trigger on new PR commits. Useful to set to false when testing new jobs. */
-  boolean commitTriggering = true
-
-  /**
-   * Whether to trigger on cron runs. Useful for jobs that run tasks covered by
-   * other test suites but that should be triggered on pull requests only.
-   */
-  boolean cronTriggering = true
-
-  /**
-   * Whether to configure defaultPathTriggers.
-   * Set to false for PreCommit jobs that should run only on certain code path changes.
-   */
-  boolean defaultPathTriggering = true
-
-  /** Number of builds to retain in history. */
-  int numBuildsToRetain = -1
-
-  /**
-   * Define a set of pre-commit jobs.
-   *
-   * @param additionalCustomization Job DSL closure with additional customization to apply to the job.
-   */
-  void build(Closure additionalCustomization = {}) {
-    if (cronTriggering) {
-      defineCronJob additionalCustomization
-    }
-    if (commitTriggering) {
-      defineCommitJob additionalCustomization
-    }
-    definePhraseJob additionalCustomization
-  }
-
-  /** Create a pre-commit job which runs on a regular schedule. */
-  private void defineCronJob(Closure additionalCustomization) {
-    def job = createBaseJob 'Cron'
-    job.with {
-      description buildDescription('on a regular schedule.')
-      commonJobProperties.setAutoJob delegate
-    }
-    job.with additionalCustomization
-  }
-
-  /** Create a pre-commit job which runs on every commit to a PR. */
-  private void defineCommitJob(Closure additionalCustomization) {
-    def job = createBaseJob 'Commit', true
-    def defaultPathTriggers = [
-      '^build.gradle$',
-      '^buildSrc/.*$',
-      '^gradle/.*$',
-      '^gradle.properties$',
-      '^gradlew$',
-      '^gradlew.bat$',
-      '^settings.gradle.kts$'
-    ]
-    if (defaultPathTriggering && triggerPathPatterns) {
-      triggerPathPatterns.addAll defaultPathTriggers
-    }
-    job.with {
-      description buildDescription('for each commit push.')
-      concurrentBuild()
-      commonJobProperties.setPullRequestBuildTrigger(delegate,
-          githubUiHint(),
-          '',
-          false,
-          true,
-          triggerPathPatterns,
-          excludePathPatterns)
-    }
-    job.with additionalCustomization
-  }
-
-  private void definePhraseJob(Closure additionalCustomization) {
-    def job = createBaseJob 'Phrase'
-    job.with {
-      description buildDescription("on trigger phrase 
'${buildTriggerPhrase()}'.")
-      concurrentBuild()
-      commonJobProperties.setPullRequestBuildTrigger delegate, githubUiHint(), buildTriggerPhrase()
-    }
-    job.with additionalCustomization
-  }
-
-  private Object createBaseJob(nameSuffix, usesRegionFilter = false) {
-    def allowRemotePoll = !usesRegionFilter
-    return scope.job("beam_PreCommit_${nameBase}_${nameSuffix}") {
-      commonJobProperties.setTopLevelMainJobProperties(delegate,
-          'master',
-          timeoutMins,
-          allowRemotePoll,
-          'beam',
-          true,
-          numBuildsToRetain) // needed for included regions PR triggering; see [JENKINS-23606]
-      steps {
-        gradle {
-          rootBuildScriptDir(commonJobProperties.checkoutDir)
-          tasks(gradleTasks.join(' ') + (gradleTask ?: ""))
-          gradleSwitches.each { switches(it) }
-          commonJobProperties.setGradleSwitches(delegate)
-        }
-      }
-    }
-  }
-
-  /** The magic phrase used to trigger the job when posted as a PR comment. */
-  private String buildTriggerPhrase() {
-    return "Run ${nameBase} PreCommit"
-  }
-
-  /** A human-readable description which will be used as the base of all suite jobs. */
-  private buildDescription(String triggerDescription) {
-    return "Runs ${nameBase} PreCommit tests ${triggerDescription}"
-  }
-
-  private String githubUiHint() {
-    "${nameBase} (\"${buildTriggerPhrase()}\")"
-  }
-}
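
And a sketch of how a pre-commit suite was declared with this builder; the nameBase, Gradle task, and path pattern are illustrative. The call below would define beam_PreCommit_GoExample_Cron, _Commit, and _Phrase jobs, the last answering to the comment 'Run GoExample PreCommit':

    new PrecommitJobBuilder(
        scope: this,
        nameBase: 'GoExample',
        gradleTasks: [':goPreCommit'],
        timeoutMins: 60,
        triggerPathPatterns: ['^sdks/go/.*$']
        ).build {
          // additional Job DSL customization, e.g. archiving test results
          publishers {
            archiveJunit('**/build/test-results/**/*.xml')
          }
        }
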
diff --git a/.test-infra/jenkins/PythonTestProperties.groovy b/.test-infra/jenkins/PythonTestProperties.groovy
deleted file mode 100644
index f050a29ea77..00000000000
--- a/.test-infra/jenkins/PythonTestProperties.groovy
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-class PythonTestProperties {
-  // Indicates all supported Python versions.
-  // This must be sorted in ascending order.
-  final static List<String> ALL_SUPPORTED_VERSIONS = [
-    '3.10',
-    '3.11',
-    '3.12',
-    '3.13'
-  ]
-  final static List<String> SUPPORTED_CONTAINER_TASKS = ALL_SUPPORTED_VERSIONS.collect {
-    "py${it.replace('.', '')}"
-  }
-  final static String LOWEST_SUPPORTED = ALL_SUPPORTED_VERSIONS[0]
-  final static String HIGHEST_SUPPORTED = ALL_SUPPORTED_VERSIONS[-1]
-  final static List<String> ESSENTIAL_VERSIONS = [
-    LOWEST_SUPPORTED,
-    HIGHEST_SUPPORTED
-  ]
-  final static List<String> CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS = ESSENTIAL_VERSIONS
-  final static List<String> CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS = [HIGHEST_SUPPORTED]
-  final static List<String> VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS = ALL_SUPPORTED_VERSIONS
-  final static String LOAD_TEST_PYTHON_VERSION = '3.10'
-  final static String RUN_INFERENCE_TEST_PYTHON_VERSION = '3.10'
-  final static String CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION = '3.10'
-  // Use for various shell scripts triggered by Jenkins.
-  // Gradle scripts should use project.ext.pythonVersion defined by PythonNature/BeamModulePlugin.
-  final static String DEFAULT_INTERPRETER = 'python3.10'
-}
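
Since the remaining constants are all derived from ALL_SUPPORTED_VERSIONS, they resolve to the following values (evaluating the expressions exactly as written above):

    assert PythonTestProperties.SUPPORTED_CONTAINER_TASKS ==
        ['py310', 'py311', 'py312', 'py313']
    assert PythonTestProperties.LOWEST_SUPPORTED == '3.10'
    assert PythonTestProperties.HIGHEST_SUPPORTED == '3.13'
    assert PythonTestProperties.ESSENTIAL_VERSIONS == ['3.10', '3.13']
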
diff --git a/.test-infra/jenkins/README.md b/.test-infra/jenkins/README.md
deleted file mode 100644
index aa1a35741b2..00000000000
--- a/.test-infra/jenkins/README.md
+++ /dev/null
@@ -1,26 +0,0 @@
-<!--
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
--->
-
-> **PLEASE update this file if you add a new job or change a name/trigger phrase in the Groovy files.**
-
-## Beam Jenkins
-
-**DEPRECATED:** As of November 2023, Beam CI has migrated to self-hosted GitHub Actions: [link](https://github.com/apache/beam/blob/master/.github/workflows/README.md). New tests should be set up with GitHub Actions, and the Jenkins jobs listed below are planned to be shut down.
-
-All jobs have been migrated as of January 2023; this folder just contains remaining test resources, which should be moved or cleaned up (https://github.com/apache/beam/issues/30112).
diff --git a/.test-infra/jenkins/TpcdsDatabaseProperties.groovy b/.test-infra/jenkins/TpcdsDatabaseProperties.groovy
deleted file mode 100644
index 289537bd070..00000000000
--- a/.test-infra/jenkins/TpcdsDatabaseProperties.groovy
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import InfluxDBCredentialsHelper
-
-// Contains BigQuery- and InfluxDB-related properties for TPC-DS runs.
-class TpcdsDatabaseProperties {
-
-  static Map<String, Object> tpcdsBigQueryArgs = [
-    'bigQueryTable'          : 'tpcds',
-    'bigQueryDataset'        : 'tpcds',
-    'project'                : 'apache-beam-testing',
-    'resourceNameMode'       : 'QUERY_RUNNER_AND_MODE',
-    'exportSummaryToBigQuery': true,
-    'tempLocation'           : 'gs://temp-storage-for-perf-tests/tpcds',
-  ]
-
-  static Map<String, Object> tpcdsInfluxDBArgs = [
-    'influxDatabase'         : InfluxDBCredentialsHelper.InfluxDBDatabaseName,
-    'influxHost'             : InfluxDBCredentialsHelper.InfluxDBHostUrl,
-    'baseInfluxMeasurement'  : 'tpcds',
-    'exportSummaryToInfluxDB': true,
-    'influxRetentionPolicy'  : 'forever',
-  ]
-
-  static String tpcdsQueriesArg = '3,7,10,25,26,29,35,38,40,42,43,52,55,69,79,83,84,87,93,96'
-}
diff --git a/.test-infra/jenkins/build.gradle b/.test-infra/jenkins/build.gradle
deleted file mode 100644
index 6a77c4b827e..00000000000
--- a/.test-infra/jenkins/build.gradle
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-plugins {
-  id 'org.apache.beam.module'
-}
-applyGroovyNature()
-applyPythonNature()
-
-task generateMetricsReport {
-  dependsOn setupVirtualenv
-  def metricsReportFilename = "beam-metrics_report.html"
-  def generateMetricsReportDir = "${rootDir}/.test-infra/jenkins/metrics_report"
-  def generateMetricsReportPath = "${generateMetricsReportDir}/${metricsReportFilename}"
-  def toxConfigFilePath = "${rootDir}/.test-infra/jenkins/metrics_report/tox.ini"
-
-  def influxDb = project.findProperty('influxDb')
-  def influxHost = project.findProperty('influxHost')
-  def influxPort = project.findProperty('influxPort')
-
-  doLast {
-    exec {
-      executable 'sh'
-      args '-c', ". ${envdir}/bin/activate && tox -e py310-test -c 
${toxConfigFilePath}"
-    }
-    exec {
-      executable 'sh'
-      args '-c', ". ${envdir}/bin/activate && tox -e py310-generate-report -c 
${toxConfigFilePath} -- --influx-db=${influxDb} --influx-host=${influxHost} 
--influx-port=${influxPort} --output-file=${generateMetricsReportPath}"
-    }
-    logger.info('Created metrics report file {}', generateMetricsReportPath)
-  }
-  outputs.file "${generateMetricsReportPath}"
-}
diff --git a/.test-infra/jenkins/metrics_report/dashboards_parser.py b/.test-infra/jenkins/metrics_report/dashboards_parser.py
deleted file mode 100644
index 5fdb208e2b5..00000000000
--- a/.test-infra/jenkins/metrics_report/dashboards_parser.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import json
-import os
-import re
-import unittest
-
-
-class Dashboard:
-  def __init__(self, file):
-    self.file = file
-    self.uid, self.queries = self.get_dashboard_uid_and_queries(file)
-    self.regexes = set(
-        self.parse_query_to_regex(query) for query in self.queries)
-
-  @staticmethod
-  def get_dashboard_uid_and_queries(file):
-    queries = []
-    with open(file, "r") as f:
-      data = json.load(f)
-      uid = data.get("uid")
-      for panel in data.get("panels", []):
-        for target in panel.get("targets", []):
-          query = target.get("query")
-          queries.append(query)
-
-    return uid, queries
-
-  @staticmethod
-  def parse_query_to_regex(query):
-    select_pattern = r"(.*FROM\s)(.*)(\sWHERE.*)"
-    match = re.match(select_pattern, query)
-    if match:
-      from_ = match.group(2)
-      without_quotes = re.sub(r"\"", "", from_)
-      without_retention_policy = without_quotes
-      if re.match(r"(\w+.\.)(.*)", without_quotes):
-        without_retention_policy = re.sub(r"(\w+.)(.*)", r"\2", without_quotes)
-
-      replaced_parameters = re.sub(
-          r"\$\{\w+\}", r"[\\w\\d]*", without_retention_policy)
-      return replaced_parameters
-
-  @staticmethod
-  def _get_json_files_from_directory(directory):
-    return [
-        os.path.join(directory, i) for i in os.listdir(directory)
-        if i.endswith(".json")
-    ]
-
-  @classmethod
-  def get_dashboards_from_directory(cls, directory):
-    for file in cls._get_json_files_from_directory(directory):
-      yield cls(file)
-
-
-def guess_dashboard_by_measurement(
-    measurement, directory, additional_query_substrings=None):
-  """
-  Guesses dashboards by measurement name by parsing queries and matching them with the measurement.
-  It is done by using regular expressions obtained from the queries.
-  Additionally, queries can be checked for the presence of any of the substrings.
-  """
-  dashboards = list(Dashboard.get_dashboards_from_directory(directory))
-  ret = []
-  for dashboard in dashboards:
-    for regex in dashboard.regexes:
-      if additional_query_substrings and not any(
-          substring.lower() in query.lower()
-          for substring in additional_query_substrings
-          for query in dashboard.queries):
-        continue
-      if regex and re.match(regex, measurement):
-        ret.append(dashboard)
-  return list(set(ret))
-
-
-class TestParseQueryToRegex(unittest.TestCase):
-  def test_parse_query_to_regex_1(self):
-    query = (
-        'SELECT "runtimeMs" FROM "forever"."nexmark_${ID}_${processingType}" 
WHERE '
-        '"runner" =~ /^$runner$/ AND $timeFilter GROUP BY "runner"')
-    expected = r"nexmark_[\w\d]*_[\w\d]*"
-    result = Dashboard.parse_query_to_regex(query)
-    self.assertEqual(expected, result)
-
-  def test_parse_query_to_regex_2(self):
-    query = (
-        'SELECT mean("value") FROM "python_bqio_read_10GB_results" WHERE 
"metric" '
-        '=~ /runtime/ AND $timeFilter GROUP BY time($__interval), "metric"')
-    expected = "python_bqio_read_10GB_results"
-    result = Dashboard.parse_query_to_regex(query)
-    self.assertEqual(expected, result)
-
-  def test_parse_query_to_regex_3(self):
-    query = (
-        'SELECT mean("value") FROM "${sdk}_${processingType}_cogbk_3" WHERE '
-        '"metric" =~ /runtime/ AND $timeFilter GROUP BY time($__interval), 
"metric"'
-    )
-    expected = "[\w\d]*_[\w\d]*_cogbk_3"
-    result = Dashboard.parse_query_to_regex(query)
-    self.assertEqual(expected, result)
diff --git a/.test-infra/jenkins/metrics_report/report_generator.py b/.test-infra/jenkins/metrics_report/report_generator.py
deleted file mode 100644
index bdaada04f30..00000000000
--- a/.test-infra/jenkins/metrics_report/report_generator.py
+++ /dev/null
@@ -1,230 +0,0 @@
-#!/usr/bin/env python
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import argparse
-from itertools import product
-import logging
-import os
-
-from influxdb import InfluxDBClient
-import jinja2
-from prettytable import PrettyTable
-
-from dashboards_parser import guess_dashboard_by_measurement
-
-INFLUXDB_USER = os.getenv("INFLUXDB_USER")
-INFLUXDB_USER_PASSWORD = os.getenv("INFLUXDB_USER_PASSWORD")
-WORKING_SPACE = os.getenv("GITHUB_WORKSPACE", os.getenv("WORKSPACE", ""))
-if "GITHUB_WORKSPACE" in os.environ:
-   path_prefix = ""
-else:
-    path_prefix= "src/"
-PERF_DASHBOARDS = os.path.join(
-    WORKING_SPACE,
-    path_prefix+".test-infra/metrics/grafana/dashboards/perftests_metrics/")
-TABLE_FIELD_NAMES = [
-    "Measurement",
-    "Metric",
-    "Runner",
-    "Mean previous week",
-    "Mean last week",
-    "Diff %",
-    "Dashboard",
-]
-
-QUERY_RUNTIME = """SELECT mean("value") AS "mean_value" 
-  FROM 
-    "{database}"."{retention_policy}"."{measurement}"
-  WHERE 
-      time > (now()- 2w) 
-    AND 
-      time < now() 
-  GROUP BY time(1w), "metric" FILL(none);"""
-
-QUERY_RUNTIME_MS = """SELECT mean("runtimeMs") AS "mean_value" 
-  FROM 
-    "{database}"."{retention_policy}"."{measurement}"
-  WHERE 
-      time > (now()- 2w) 
-    AND 
-      time < now() 
-  GROUP BY time(1w), "runner" FILL(none);"""
-
-
-def parse_arguments():
-  """
-  Gets all necessary command-line arguments.
-  Return: influx_host, influx_port, influx_db, output_file
-  """
-  parser = argparse.ArgumentParser(
-      description="Script for generating Beam Metrics Report.")
-  parser.add_argument("--influx-host", required=True)
-  parser.add_argument("--influx-port", required=True)
-  parser.add_argument("--influx-db", required=True)
-  parser.add_argument("--output-file", required=True)
-
-  args = parser.parse_args()
-
-  influx_host = args.influx_host
-  influx_port = args.influx_port
-  influx_db = args.influx_db
-  output_file = args.output_file
-
-  return influx_host, influx_port, influx_db, output_file
-
-
-def get_retention_policies_names(client, database):
-  return (
-      i.get("name")
-      for i in client.get_list_retention_policies(database=database))
-
-
-def get_measurements_names(client):
-  return (i.get("name") for i in client.get_list_measurements())
-
-
-def calc_diff(prev, curr):
-  """Returns percentage difference between two values."""
-  return ((curr - prev) / prev * 100.0 if prev != 0 else float("inf") *
-          abs(curr) / curr if curr != 0 else 0.0)
-
-
-def _get_query_runtime_data(client, bind_params):
-  """Returns data for measurements with runtime, write_time or read_time 
metrics"""
-  data = []
-  result = client.query(QUERY_RUNTIME.format(**bind_params))
-  for i in result.items():
-    measurement = i[0][0]
-    metric = i[0][1].get("metric")
-    runner = "-"
-    measurement_data = list(i[1])
-
-    if all(m not in metric for m in ["runtime", "write_time", "read_time"]):
-      continue
-
-    if len(measurement_data) >= 2:
-      previous = measurement_data[-2]["mean_value"]
-      current = measurement_data[-1]["mean_value"]
-      diff = calc_diff(previous, current)
-      dashboards = [
-          "http://metrics.beam.apache.org/d/{}".format(dashboard.uid)
-          for dashboard in guess_dashboard_by_measurement(
-              measurement,
-              PERF_DASHBOARDS,
-              ["runtime", "write_time", "read_time"],
-          )
-      ]
-      data.append([
-          measurement,
-          metric,
-          runner,
-          round(previous, 2),
-          round(current, 2),
-          round(diff, 2),
-          dashboards,
-      ])
-
-  return data
-
-
-def _get_query_runtime_ms_data(client, bind_params):
-  """Returns data for measurements with RuntimeMs metrics"""
-  data = []
-  result = client.query(QUERY_RUNTIME_MS.format(**bind_params))
-  for i in result.items():
-    measurement = i[0][0]
-    metric = "RuntimeMs"
-    runner = i[0][1].get("runner")
-    measurement_data = list(i[1])
-
-    if len(measurement_data) >= 2:
-      previous = measurement_data[-2]["mean_value"]
-      current = measurement_data[-1]["mean_value"]
-      diff = calc_diff(previous, current)
-      dashboards = [
-          "http://metrics.beam.apache.org/d/{}".format(dashboard.uid)
-          for dashboard in guess_dashboard_by_measurement(
-              measurement, PERF_DASHBOARDS, [metric])
-      ]
-      data.append([
-          measurement,
-          metric,
-          runner,
-          round(previous, 2),
-          round(current, 2),
-          round(diff, 2),
-          dashboards,
-      ])
-
-  return data
-
-
-def get_metrics_data(client, database):
-  data = []
-  for retention_policy, measurements_name in product(
-      get_retention_policies_names(client, database), get_measurements_names(client)
-  ):
-    bind_params = {
-        "database": database,
-        "measurement": measurements_name,
-        "retention_policy": retention_policy,
-    }
-
-    data.extend(_get_query_runtime_data(client, bind_params))
-    data.extend(_get_query_runtime_ms_data(client, bind_params))
-
-  return [d for d in data if d]
-
-
-def print_table(data):
-  table = PrettyTable()
-  table.field_names = TABLE_FIELD_NAMES
-  for d in data:
-    table.add_row(d)
-  print(table)
-
-
-def generate_report(data, output_file):
-  logging.info("Generating {}".format(output_file))
-  env = jinja2.Environment(
-      loader=jinja2.FileSystemLoader(
-          os.path.join(
-              os.path.dirname(os.path.realpath(__file__)), "templates")),
-  )
-  template = env.get_template("Metrics_Report.template")
-  with open(output_file, "w") as file:
-    file.write(template.render(headers=TABLE_FIELD_NAMES, metrics_data=data))
-  logging.info("{} saved.".format(output_file))
-
-
-def main():
-  influx_host, influx_port, influx_db, output_file = parse_arguments()
-
-  client = InfluxDBClient(
-      host=influx_host,
-      port=influx_port,
-      database=influx_db,
-      username=INFLUXDB_USER,
-      password=INFLUXDB_USER_PASSWORD,
-  )
-
-  data = get_metrics_data(client, influx_db)
-  print_table(data)
-  generate_report(data, output_file)
-
-
-if __name__ == "__main__":
-  main()
diff --git a/.test-infra/jenkins/metrics_report/requirements.txt b/.test-infra/jenkins/metrics_report/requirements.txt
deleted file mode 100644
index d60ad953a24..00000000000
--- a/.test-infra/jenkins/metrics_report/requirements.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Markupsafe breaking change broke Jinja and some other libs
-# Pinning it to a version which works even though we are not using it explicitly
-# https://github.com/aws/aws-sam-cli/issues/3661
-markupsafe==2.0.1
-influxdb==5.3.0
-Jinja2==3.1.6
-prettytable==0.7.2
diff --git a/.test-infra/jenkins/metrics_report/templates/Metrics_Report.template b/.test-infra/jenkins/metrics_report/templates/Metrics_Report.template
deleted file mode 100644
index 9492322219b..00000000000
--- a/.test-infra/jenkins/metrics_report/templates/Metrics_Report.template
+++ /dev/null
@@ -1,141 +0,0 @@
-{#
-   Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-#}
-
-<html>
-<body>
-
-<h1>Beam Metrics Report</h1>
-
-Color legend:
-<table>
-    <tr>
-        <td style="background-color:Red;padding:9px;text-align:center;">>= 
20</td>
-        <td style="background-color:Yellow;padding:9px;text-align:center;">>= 
10</td>
-        <td 
style="background-color:YellowGreen;padding:9px;text-align:center;"><= -10</td>
-        <td style="background-color:Green;padding:9px;text-align:center;"><= 
-20</td>
-    </tr>
-</table>
-
-
-<h2>Possible regression</h2>
-
-<table>
-<tr>
-    {% for header in headers -%}
-        <td style="border-bottom:1px solid #ddd"><b>{{ header }}</b></td>
-    {% endfor %}
-</tr>
-
-{% for row in metrics_data -%}
-    {% if row[5] >= 10 %}
-        <tr>
-            {% for item in row -%}
-                {% if not loop.last %}
-                    {% if loop.index == 6 and item >= 20 %}
-                        <td style="background-color:Red;border-bottom:1px 
solid #ddd">{{ item }}</td>
-                    {% elif loop.index == 6 and item >= 10 %}
-                        <td style="background-color:Yellow;border-bottom:1px 
solid #ddd">{{ item }}</td>
-                    {% else %}
-                        <td style="border-bottom: 1px solid #ddd">{{ item 
}}</td>
-                    {% endif %}
-                {% else %}
-                    <td style="border-bottom:1px solid #ddd">
-                    {% for link in item -%}
-                        <a href={{ link }}>[{{ loop.index }}]</a>
-                    {% endfor %}
-                    </td>
-                {% endif %}
-            {% endfor %}
-        </tr>
-    {% endif %}
-{% endfor %}
-</table>
-
-
-<h2>Possible improvement</h2>
-
-<table>
-<tr>
-    {% for header in headers -%}
-        <td style="border-bottom:1px solid #ddd"><b>{{ header }}</b></td>
-    {% endfor %}
-</tr>
-
-{% for row in metrics_data -%}
-    {% if row[5] <= -10 %}
-        <tr>
-            {% for item in row -%}
-                {% if not loop.last %}
-                    {% if loop.index == 6 and item <= -20 %}
-                        <td style="background-color:Green;border-bottom:1px 
solid #ddd">{{ item }}</td>
-                    {% elif loop.index == 6 and item <= -10 %}
-                        <td 
style="background-color:YellowGreen;border-bottom:1px solid s#ddd">{{ item 
}}</td>
-                    {% else %}
-                        <td style="border-bottom: 1px solid #ddd">{{ item 
}}</td>
-                    {% endif %}
-                {% else %}
-                    <td style="border-bottom:1px solid #ddd">
-                    {% for link in item -%}
-                        <a href={{ link }}>[{{ loop.index }}]</a>
-                    {% endfor %}
-                    </td>
-                {% endif %}
-            {% endfor %}
-        </tr>
-    {% endif %}
-{% endfor %}
-</table>
-
-
-<h2>All metrics</h2>
-<table>
-<tr>
-    {% for header in headers -%}
-        <td style="border-bottom:1px solid #ddd"><b>{{ header }}</b></td>
-    {% endfor %}
-</tr>
-
-{% for row in metrics_data -%}
-<tr>
-    {% for item in row -%}
-        {% if not loop.last %}
-            {% if loop.index == 6 and item >= 20 %}
-                <td style="background-color:Red;border-bottom:1px solid 
#ddd">{{ item }}</td>
-            {% elif loop.index == 6 and item >= 10 %}
-                <td style="background-color:Yellow;border-bottom:1px solid 
#ddd">{{ item }}</td>
-            {% elif loop.index == 6 and item <= -20 %}
-                <td style="background-color:Green;border-bottom:1px solid 
#ddd">{{ item }}</td>
-            {% elif loop.index == 6 and item <= -10 %}
-                <td style="background-color:YellowGreen;border-bottom:1px 
solid s#ddd">{{ item }}</td>
-            {% else %}
-                <td style="border-bottom: 1px solid #ddd">{{ item }}</td>
-            {% endif %}
-        {% else %}
-            <td style="border-bottom:1px solid #ddd">
-            {% for link in item -%}
-                <a href={{ link }}>[{{ loop.index }}]</a>
-            {% endfor %}
-            </td>
-        {% endif %}
-    {% endfor %}
-</tr>
-{% endfor %}
-</table>
-
-</body>
-</html>
diff --git a/.test-infra/jenkins/metrics_report/tox.ini b/.test-infra/jenkins/metrics_report/tox.ini
deleted file mode 100644
index 56fa14e6727..00000000000
--- a/.test-infra/jenkins/metrics_report/tox.ini
+++ /dev/null
@@ -1,36 +0,0 @@
-;
-;    Licensed to the Apache Software Foundation (ASF) under one or more
-;    contributor license agreements.  See the NOTICE file distributed with
-;    this work for additional information regarding copyright ownership.
-;    The ASF licenses this file to You under the Apache License, Version 2.0
-;    (the "License"); you may not use this file except in compliance with
-;    the License.  You may obtain a copy of the License at
-;
-;       http://www.apache.org/licenses/LICENSE-2.0
-;
-;    Unless required by applicable law or agreed to in writing, software
-;    distributed under the License is distributed on an "AS IS" BASIS,
-;    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-;    See the License for the specific language governing permissions and
-;    limitations under the License.
-;
-; TODO(https://github.com/apache/beam/issues/20209): Don't hardcode Py3.8 version.
-[tox]
-skipsdist = True
-envlist = py310-test,py310-generate-report
-
-[testenv]
-commands_pre =
-  python --version
-  pip --version
-  pip check
-
-[testenv:py310-test]
-deps = -r requirements.txt
-passenv = WORKSPACE,INFLUXDB_USER,INFLUXDB_USER_PASSWORD
-commands = python -m unittest dashboards_parser.py
-
-[testenv:py310-generate-report]
-deps = -r requirements.txt
-passenv = WORKSPACE,INFLUXDB_USER,INFLUXDB_USER_PASSWORD,GITHUB_WORKSPACE
-commands = python report_generator.py {posargs}
diff --git a/contributor-docs/committer-guide.md b/contributor-docs/committer-guide.md
index 9e63679776c..f5d07d7da0d 100644
--- a/contributor-docs/committer-guide.md
+++ b/contributor-docs/committer-guide.md
@@ -145,12 +145,3 @@ Instead, pull it all into the subject line:
     Merge pull request #1234: [BEAM-7873] Fix the foo bizzle bazzle
 
 If you have comments to add, put them in the body of the commit message.
-
-## Seed jobs
-
-As a committer, you can now run seed jobs! These are used to update our Jenkins
-configuration and can be run to test PRs modifying Groovy files before they are
-merged.
-
-To make sure you have these permissions, put up a PR adding yourself to
-https://github.com/apache/beam/blob/master/.test-infra/jenkins/Committers.groovy
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 40ddf1c0b43..bea48565bfc 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -339,8 +339,6 @@ include("beam-test-infra-mock-apis")
 project(":beam-test-infra-mock-apis").projectDir = 
file(".test-infra/mock-apis")
 include("beam-test-tools")
 project(":beam-test-tools").projectDir = file(".test-infra/tools")
-include("beam-test-jenkins")
-project(":beam-test-jenkins").projectDir = file(".test-infra/jenkins")
 include("beam-test-gha")
 project(":beam-test-gha").projectDir = file(".github")
 include("beam-validate-runner")
diff --git a/website/www/site/content/en/documentation/io/testing.md b/website/www/site/content/en/documentation/io/testing.md
index 43a18d1e7c5..be256470558 100644
--- a/website/www/site/content/en/documentation/io/testing.md
+++ b/website/www/site/content/en/documentation/io/testing.md
@@ -387,14 +387,6 @@ Guidelines for creating a Beam data store Kubernetes script:
         1.  Official Docker images, because they have security fixes and guaranteed maintenance.
         1.  Non-official Docker images, or images from other providers that have good maintainers (e.g. [quay.io](https://quay.io/)).
 
-#### Jenkins jobs {#jenkins-jobs}
-
-You can find examples of existing IOIT Jenkins job definitions in the [.test-infra/jenkins](https://github.com/apache/beam/tree/master/.test-infra/jenkins) directory. Look for files called job_PerformanceTest_*.groovy. The most prominent examples are:
-* [JDBC](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_PerformanceTests_JDBC.groovy) IOIT job
-* [MongoDB](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_PerformanceTests_MongoDBIO_IT.groovy) IOIT job
-* [File-based](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_PerformanceTests_FileBasedIO_IT.groovy) IOIT jobs
-
-Notice that there is a utility class that helps create these jobs easily, without forgetting important steps or repeating code. See [Kubernetes.groovy](https://github.com/apache/beam/blob/master/.test-infra/jenkins/Kubernetes.groovy) for more details.
 
 ### Small Scale and Large Scale Integration Tests {#small-scale-and-large-scale-integration-tests}
 
