This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-3 by this push:
     new 53664500283 HBASE-29994 Separate backwards compatibility checks to a new jenkins job (#7938)
53664500283 is described below

commit 53664500283b56e90e52d60dfd56b4453c4b14be
Author: Duo Zhang <[email protected]>
AuthorDate: Sun Mar 15 22:25:32 2026 +0800

    HBASE-29994 Separate backwards compatibility checks to a new jenkins job (#7938)
    
    Signed-off-by: Xiao Liu <[email protected]>
---
 dev-support/Jenkinsfile                            | 145 ---------
 ...doop3-backwards-compatibility-check.Jenkinsfile | 326 +++++++++++++++++++++
 .../integration-test/integration-test.Jenkinsfile  |   7 +
 3 files changed, 333 insertions(+), 145 deletions(-)

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 6708e2837cb..e0393bb79d7 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -139,14 +139,6 @@ pipeline {
         stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
         stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
         stash name: 'jdk17-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/doesn't-match"
-        script {
-          hadoop3_versions = env.HADOOP3_VERSIONS.split(",");
-          for (hadoop3_version in hadoop3_versions) {
-            // confusing environment vs Groovy variables
-            stash(name: "jdk17-hadoop3-backwards-result-${hadoop3_version}", allowEmpty: true, includes: "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/doesn't-match")
-          }
-        }
-        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
       }
     }
     stage ('health checks') {
@@ -703,134 +695,6 @@ pipeline {
             }
           }
         }
-        // If/when we transition to transient runners, we could run every Hadoop check as a matrix job
-        stage ('yetus jdk17 hadoop3 backwards compatibility checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            SET_JAVA_HOME = "/usr/lib/jvm/java-17"
-            // Activates hadoop 3.0 profile in maven runs.
-            HADOOP_PROFILE = '3.0'
-            // HADOOP_THREE_VERSION is set in script for loop
-            TEST_PROFILE = 'runDevTests'
-            SKIP_ERRORPRONE = true
-          }
-          steps {
-            script {
-              for (hadoop3_version in hadoop3_versions) {
-                if (hadoop3_version == env.HADOOP3_DEFAULT_VERSION) {
-                  // We are running the full test suite, no need to run the dev tests too
-                  continue
-                }
-                //HADOOP_THREE_VERSION is the environment variable name expected by the nightly shell script
-                env.HADOOP_THREE_VERSION = hadoop3_version;
-                env.OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${env.HADOOP_THREE_VERSION}"
-                env.OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${env.HADOOP_THREE_VERSION}"
-                try {
-                  stage ('yetus jdk17 hadoop3 backwards compatibility checks inner stage') {
-                    // Must do prior to anything else, since if one of them times out we'll stash the commentfile
-                    sh '''#!/usr/bin/env bash
-                     set -e
-                     rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-                     rm -f "${OUTPUT_DIR}/commentfile"
-                    '''
-                    unstash 'yetus'
-                    dir('component') {
-                      checkout scm
-                    }
-                    sh '''#!/usr/bin/env bash
-                      set -e
-                      rm -rf "${OUTPUT_DIR}/machine" && mkdir 
"${OUTPUT_DIR}/machine"
-                      "${BASEDIR}/dev-support/gather_machine_environment.sh" 
"${OUTPUT_DIR_RELATIVE}/machine"
-                      echo "got the following saved stats in 
'${OUTPUT_DIR_RELATIVE}/machine'"
-                      ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-                    '''
-                    script {
-                      def ret = sh(
-                        returnStatus: true,
-                        script: '''#!/usr/bin/env bash
-                          set -e
-                          declare -i status=0
-                          if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; 
then
-                            echo "(/) {color:green}+1 jdk17 hadoop 
${HADOOP_THREE_VERSION} backward compatibility checks{color}" > 
"${OUTPUT_DIR}/commentfile"
-                          else
-                            echo "(x) {color:red}-1 jdk17 hadoop 
${HADOOP_THREE_VERSION} backward compatibility checks{color}" > 
"${OUTPUT_DIR}/commentfile"
-                            status=1
-                          fi
-                          echo "-- For more information [see jdk17 
report|${BUILD_URL}JDK17_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> 
"${OUTPUT_DIR}/commentfile"
-                          exit "${status}"
-                        '''
-                      )
-                      if (ret != 0) {
-                        // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                        // test output. See HBASE-26339 for more details.
-                        currentBuild.result = 'UNSTABLE'
-                      }
-                    } //script
-                  } //stage ('yetus jdk17 hadoop3 backwards compatibility checks inner stage') {
-                } //try
-                finally {
-                  stash name: "jdk17-hadoop3-backwards-result-${HADOOP_THREE_VERSION}", includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-                  junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-                  // zip surefire reports.
-                  sh '''#!/bin/bash -e
-                    if [ ! -f "${OUTPUT_DIR}/commentfile" ]; then
-                      echo "(x) {color:red}-1 jdk17 hadoop 
${HADOOP_THREE_VERSION} backward compatibility checks{color}" 
>"${OUTPUT_DIR}/commentfile"
-                      echo "-- Something went wrong running this stage, please 
[check relevant console output|${BUILD_URL}/console]." >> 
"${OUTPUT_DIR}/commentfile"
-                    fi
-                    if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                      count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                      if [[ 0 -ne ${count} ]]; then
-                        echo "zipping ${count} archived files"
-                        zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                      else
-                        echo "No archived files, skipping compressing."
-                      fi
-                    else
-                      echo "No archiver directory, skipping compressing."
-                    fi
-                  '''
-                  sshPublisher(publishers: [
-                    sshPublisherDesc(configName: 'Nightlies',
-                      transfers: [
-                        sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                          sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                        )
-                      ]
-                    )
-                  ])
-                  // remove the big test logs zip file, store the nightlies url in test_logs.html
-                  sh '''#!/bin/bash -e
-                    if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                      echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving 
space"
-                      rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                      python3 ${BASEDIR}/dev-support/gen_redirect_html.py 
"${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
-                    else
-                      echo "No test_logs.zip, skipping"
-                    fi
-                '''
-                  // Has to be relative to WORKSPACE.
-                  archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-                  archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-                  publishHTML target: [
-                    allowMissing         : true,
-                    keepAll              : true,
-                    alwaysLinkToLastBuild: true,
-                    // Has to be relative to WORKSPACE.
-                    reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                    reportFiles          : 'console-report.html',
-                    reportName           : "JDK17 Nightly Build Report (Hadoop ${HADOOP_THREE_VERSION} backwards compatibility)"
-                  ]
-                } //finally
-              } // for
-            } //script
-          } //steps
-        } //stage ('yetus jdk17 hadoop3 backwards compatibility checks')
       } // parallel
     } //stage:_health checks
   } //stages
@@ -847,7 +711,6 @@ pipeline {
              rm -rf ${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}
              rm -rf ${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}
              rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}
-             rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-*
            '''
            unstash 'general-result'
            unstash 'jdk8-hadoop2-result'
@@ -860,14 +723,6 @@ pipeline {
                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/commentfile"]
-           for (hadoop3_version in hadoop3_versions) {
-             if (hadoop3_version == env.HADOOP3_DEFAULT_VERSION) {
-                // We haven't run these tests
-                continue
-            }
-             unstash("jdk17-hadoop3-backwards-result-${hadoop3_version}")
-             results.add("${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/commentfile")
-           }
            echo env.BRANCH_NAME
            echo env.BUILD_URL
            echo currentBuild.result
diff --git a/dev-support/hadoop3-backwards-compatibility-check.Jenkinsfile b/dev-support/hadoop3-backwards-compatibility-check.Jenkinsfile
new file mode 100644
index 00000000000..8e8eff8ded3
--- /dev/null
+++ b/dev-support/hadoop3-backwards-compatibility-check.Jenkinsfile
@@ -0,0 +1,326 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// Jenkinsfile for Hadoop3 Backwards Compatibility Checks
+// Uses matrix job to parallelize checks across different Hadoop3 versions
+
+pipeline {
+  agent {
+    node {
+      label 'hbase'
+    }
+  }
+  triggers {
+    pollSCM('H H */2 * *')
+  }
+  options {
+    buildDiscarder(logRotator(numToKeepStr: '20'))
+    timeout (time: 8, unit: 'HOURS')
+    timestamps()
+    skipDefaultCheckout()
+    disableConcurrentBuilds()
+  }
+  environment {
+    YETUS_RELEASE = '0.15.0'
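+    // Hadoop3 versions exercised by this job; keep in sync with the matrix
+    // axis values in the 'backwards compatibility checks' stage below.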
+    HADOOP_VERSIONS = "3.2.4,3.3.5,3.3.6,3.4.0,3.4.1,3.4.2"
+  }
+  parameters {
+    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
+
+    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
+    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
+  }
+  stages {
+    stage ('scm-checkout') {
+      steps {
+        dir('component') {
+          checkout scm
+        }
+      }
+    }
+    stage ('thirdparty installs') {
+      parallel {
+        stage ('yetus install') {
+          steps {
+            dir('downloads-yetus') {
+              sh '''#!/usr/bin/env bash
+                echo "Make sure we have a directory for downloading 
dependencies: $(pwd)"
+              '''
+            }
+            sh  '''#!/usr/bin/env bash
+              set -e
+              echo "Ensure we have a copy of Apache Yetus."
+              if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+                if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; 
then
+                  rm -rf "${YETUS_DIR}"
+                  
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh"
 \
+                      --working-dir "${WORKSPACE}/downloads-yetus" \
+                      --keys 'https://downloads.apache.org/yetus/KEYS' \
+                      --verify-tar-gz \
+                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
+                      
"yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
+                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
+                else
+                  echo "Reusing cached install of Apache Yetus version 
${YETUS_RELEASE}."
+                fi
+              else
+                YETUS_DIR="${WORKSPACE}/yetus-git"
+                rm -rf "${YETUS_DIR}"
+                echo "downloading from github"
+                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
+              fi
+              if [ ! -d "${YETUS_DIR}" ]; then
+                echo "unpacking yetus into '${YETUS_DIR}'"
+                mkdir -p "${YETUS_DIR}"
+                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
+              fi
+            '''
+            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*"
+          }
+        }
+      }
+    }
+    stage ('backwards compatibility checks') {
+      matrix {
+        axes {
+          axis {
+            name 'HADOOP3_VERSION'
+            values '3.2.4', '3.3.5', '3.3.6', '3.4.0', '3.4.1', '3.4.2'
+          }
+        }
+        agent {
+          node {
+            label 'hbase'
+          }
+        }
+        when {
+          expression {
+            if (HADOOP3_VERSION == '3.2.4') {
+              // only branch-2.5 needs to run against hadoop 3.2.4; here we also
+              // include HBASE-XXXXX-branch-2.5 feature branches
+              return env.BRANCH_NAME.contains('branch-2.5')
+            }
+            return true
+          }
+        }
+        environment {
+          PROJECT = 'hbase'
+          BASEDIR = "${WORKSPACE}/component"
+          PERSONALITY_FILE = "${BASEDIR}/dev-support/hbase-personality.sh"
+          TESTS_FILTER = 'checkstyle,javac,javadoc,pylint,shellcheck,shelldocs,blanks,perlcritic,ruby-lint,rubocop'
+          EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
+          ASF_NIGHTLIES = 'https://nightlies.apache.org'
+          ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
+          ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
+          TESTS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit'
+          SET_JAVA_HOME = "/usr/lib/jvm/java-17"
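+          // Activates hadoop 3.0 profile in maven runs.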
+          HADOOP_PROFILE = '3.0'
+          TEST_PROFILE = 'runDevTests'
+          SKIP_ERRORPRONE = true
+          OUTPUT_DIR_RELATIVE = "output-jdk17-hadoop3-backwards-${HADOOP3_VERSION}"
+          OUTPUT_DIR = "${WORKSPACE}/${OUTPUT_DIR_RELATIVE}"
+          AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc'
+          BLANKS_EOL_IGNORE_FILE = 'dev-support/blanks-eol-ignore.txt'
+          BLANKS_TABS_IGNORE_FILE = 'dev-support/blanks-tabs-ignore.txt'
+          // output from surefire; sadly the archive function in yetus only works on file names.
+          ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
+        }
+        stages {
+          stage ('run checks') {
+            steps {
+              sh '''#!/usr/bin/env bash
+                set -e
+                rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
+                rm -f "${OUTPUT_DIR}/commentfile"
+              '''
+              unstash 'yetus'
+              dir('component') {
+                checkout scm
+              }
+              sh '''#!/usr/bin/env bash
+                set -e
+                rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
+                "${BASEDIR}/dev-support/gather_machine_environment.sh" 
"${OUTPUT_DIR_RELATIVE}/machine"
+                echo "got the following saved stats in 
'${OUTPUT_DIR_RELATIVE}/machine'"
+                ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
+              '''
+              script {
+                def ret = sh(
+                  returnStatus: true,
+                  script: '''#!/usr/bin/env bash
+                    set -e
+                    declare -i status=0
+                    if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+                      echo "(/) {color:green}+1 jdk17 hadoop 
${HADOOP3_VERSION} backward compatibility checks{color}" > 
"${OUTPUT_DIR}/commentfile"
+                    else
+                      echo "(x) {color:red}-1 jdk17 hadoop ${HADOOP3_VERSION} 
backward compatibility checks{color}" > "${OUTPUT_DIR}/commentfile"
+                      status=1
+                    fi
+                    echo "-- For more information [see jdk17 
report|${BUILD_URL}console]" >> "${OUTPUT_DIR}/commentfile"
+                    exit "${status}"
+                  '''
+                )
+                if (ret != 0) {
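+                  // mark the build as UNSTABLE instead of FAILURE, to avoid skipping
+                  // the later publish of test output. See HBASE-26339 for more details.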
+                  currentBuild.result = 'UNSTABLE'
+                }
+              }
+            }
+          }
+        }
+        post {
+          always {
+            script {
+              stash name: "jdk17-hadoop3-backwards-result-${HADOOP3_VERSION}", 
includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
+              junit testResults: 
"${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
+              // zip surefire reports.
+              sh '''#!/bin/bash -e
+                if [ ! -f "${OUTPUT_DIR}/commentfile" ]; then
+                  echo "(x) {color:red}-1 jdk17 hadoop ${HADOOP3_VERSION} 
backward compatibility checks{color}" >"${OUTPUT_DIR}/commentfile"
+                  echo "-- Something went wrong running this stage, please 
[check relevant console output|${BUILD_URL}/console]." >> 
"${OUTPUT_DIR}/commentfile"
+                fi
+                if [ -d "${OUTPUT_DIR}/archiver" ]; then
+                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
+                  if [[ 0 -ne ${count} ]]; then
+                    echo "zipping ${count} archived files"
+                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
+                  else
+                    echo "No archived files, skipping compressing."
+                  fi
+                else
+                  echo "No archiver directory, skipping compressing."
+                fi
+              '''
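+              // Publish to nightlies and leave a redirect page only when a
+              // test_logs.zip was actually produced above.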
+              def logFile = "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+              if (fileExists(logFile)) {
+                sshPublisher(publishers: [
+                  sshPublisherDesc(configName: 'Nightlies',
+                    transfers: [
+                      sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+                        sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+                      )
+                    ]
+                  )
+                ])
+                sh '''#!/bin/bash -e
+                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
+                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
+                  python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
+                '''
+              }
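+              // Has to be relative to WORKSPACE.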
+              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
+              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
+              publishHTML target: [
+                allowMissing: true,
+                keepAll: true,
+                alwaysLinkToLastBuild: true,
+                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
+                reportFiles: 'console-report.html',
+                reportName: "JDK17 Nightly Build Report (Hadoop 
${HADOOP3_VERSION} backwards compatibility)"
+              ]
+            } // script
+          } // always
+        } // post
+      } // matrix
+    } // stage ('backwards compatibility checks')
+  } // stages
+  post {
+    always {
+      script {
+        sh "printenv"
+        // wipe out all the output directories before unstashing
+        sh '''
+          echo "Clean up result directories"
+          rm -rf output-jdk17-hadoop3-backwards-*
+        '''
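+        // Collect the commentfile stashed by each matrix cell; cells skipped by
+        // the 'when' condition produce no stash, hence the try/catch below.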
+        def results = []
+        for (hadoopVersion in getHadoopVersions(env.HADOOP_VERSIONS)) {
+          try {
+            unstash "jdk17-hadoop3-backwards-result-${hadoopVersion}"
+            
results.add("output-jdk17-hadoop3-backwards-${hadoopVersion}/commentfile")
+          } catch (e) {
+            echo "unstash ${hadoopVersion} failed, ignore"
+          }
+        }
+        try {
+          def comment = "Results for branch ${env.BRANCH_NAME}\n"
+          comment += "\t[build ${currentBuild.displayName} on 
builds.a.o|${env.BUILD_URL}]: "
+          if (currentBuild.result == null || currentBuild.result == "SUCCESS") 
{
+            comment += "(/) *{color:green}+1 overall{color}*\n"
+          } else {
+            comment += "(x) *{color:red}-1 overall{color}*\n"
+          }
+          comment += "----\n"
+          comment += "Backwards compatibility checks:\n"
+          comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
+
+          echo "[INFO] Comment:"
+          echo comment
+
+          def jiras = getJirasToComment(env.BRANCH_NAME, [])
+          if (jiras.isEmpty()) {
+            echo "[DEBUG] non-feature branch, checking change messages for 
jira keys."
+            jiras = getJirasToCommentFromChangesets(currentBuild)
+          }
+          jiras.each { currentIssue ->
+            jiraComment issueKey: currentIssue, body: comment
+          }
+        } catch (Exception exception) {
+          echo "Got exception: ${exception}"
+          echo "    ${exception.getStackTrace()}"
+        }
+      }
+    }
+  }
+}
+
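+// Split the comma-separated HADOOP_VERSIONS value into a trimmed list,
+// dropping any empty entries.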
+@NonCPS
+List<String> getHadoopVersions(String versions) {
+  return versions.split(',').collect { it.trim() }.findAll { it } as String[]
+}
+
+import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
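+// Collect HBASE-NNN issue keys mentioned in the commit messages of this
+// build's changesets.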
+@NonCPS
+List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
+  def seenJiras = []
+  thisBuild.changeSets.each { cs ->
+    cs.getItems().each { change ->
+      CharSequence msg = change.msg
+      echo "change: ${change}"
+      echo "     ${msg}"
+      echo "     ${change.commitId}"
+      echo "     ${change.author}"
+      seenJiras = getJirasToComment(msg, seenJiras)
+    }
+  }
+  return seenJiras
+}
+
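+// Append every HBASE-NNN key found in 'source' to 'seen', skipping keys that
+// were already collected.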
+@NonCPS
+List<String> getJirasToComment(CharSequence source, List<String> seen) {
+  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
+    echo "[DEBUG] found jira key: ${currentIssue}"
+    if (currentIssue in seen) {
+      echo "[DEBUG] already commented on ${currentIssue}."
+    } else {
+      echo "[INFO] commenting on ${currentIssue}."
+      seen << currentIssue
+    }
+  }
+  return seen
+}
diff --git a/dev-support/integration-test/integration-test.Jenkinsfile b/dev-support/integration-test/integration-test.Jenkinsfile
index d46f6d763b6..d4490bc3311 100644
--- a/dev-support/integration-test/integration-test.Jenkinsfile
+++ b/dev-support/integration-test/integration-test.Jenkinsfile
@@ -314,6 +314,13 @@ pipeline {
   post {
     always {
       script {
+        sh "printenv"
+        // wipe out all the output directories before unstashing
+        sh '''
+          echo "Clean up result directories"
+          rm -rf output-srctarball
+          rm -rf output-integration-hadoop-*
+        '''
         def results = []
         results.add('output-srctarball/commentfile')
         for (hadoopVersion in getHadoopVersions(env.HADOOP_VERSIONS)) {
