This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-28384-branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit 0473a89e2379e835d3ee4e1a9365c668e4de43b4
Author: Duo Zhang <zhang...@apache.org>
AuthorDate: Mon Feb 26 23:11:20 2024 +0800

    HBASE-28384 Client integration tests fail for branch-2/branch-2.6
---
 dev-support/Jenkinsfile                      | 32 +++++++++---
 dev-support/hbase_nightly_source-artifact.sh | 75 ++++++++++++++++++++++------
 2 files changed, 84 insertions(+), 23 deletions(-)

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index a767775b36b..19256ccb9b1 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -657,6 +657,8 @@ pipeline {
               rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
               rm -rf "hbase-install" && mkdir "hbase-install"
               rm -rf "hbase-client" && mkdir "hbase-client"
+              rm -rf "hbase-hadoop3-install"
+              rm -rf "hbase-hadoop3-client"
               rm -rf "hadoop-2" && mkdir "hadoop-2"
               rm -rf "hadoop-3" && mkdir "hadoop-3"
               rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
@@ -691,14 +693,23 @@ pipeline {
             """
             echo "unpacking the hbase bin tarball into 'hbase-install' and the 
client tarball into 'hbase-client'"
             sh '''#!/bin/bash -e
-              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
+              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v hadoop3 | wc -l) ]; then
                 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
                 exit 1
               fi
-              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
+              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v client-bin | grep -v hadoop3)
               tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
-              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
+              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz | grep -v hadoop3)
               tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
+              if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
+                echo "hadoop3 artifacts available, unpacking the hbase hadoop3 bin tarball into 'hbase-hadoop3-install' and the client hadoop3 tarball into 'hbase-hadoop3-client'"
+                mkdir hbase-hadoop3-install
+                mkdir hbase-hadoop3-client
+                hadoop3_install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | grep -v client-bin)
+                tar --strip-component=1 -xzf "${hadoop3_install_artifact}" -C "hbase-hadoop3-install"
+                hadoop3_client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-client-bin.tar.gz)
+                tar --strip-component=1 -xzf "${hadoop3_client_artifact}" -C "hbase-hadoop3-client"
+              fi
             '''
             unstash 'hadoop-2'
             sh '''#!/bin/bash -xe
@@ -731,11 +742,18 @@ pipeline {
               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
               # we need to patch some files otherwise minicluster will fail to start, see MAPREDUCE-7471
               ${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
+              hbase_install_dir="hbase-install"
+              hbase_client_dir="hbase-client"
+              if [ -d "hbase-hadoop3-install" ]; then
+                echo "run hadoop3 client integration test against hbase hadoop3 binaries"
+                hbase_install_dir="hbase-hadoop3-install"
+                hbase_client_dir="hbase-hadoop3-client"
+              fi
               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                   --single-process \
                   --working-dir output-integration/hadoop-3 \
-                  --hbase-client-install hbase-client \
-                  hbase-install \
+                  --hbase-client-install ${hbase_client_dir} \
+                  ${hbase_install_dir} \
                   hadoop-3/bin/hadoop \
                   hadoop-3/share/hadoop/yarn/timelineservice \
                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
@@ -750,8 +768,8 @@ pipeline {
                   --single-process \
                   --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
                   --working-dir output-integration/hadoop-3-shaded \
-                  --hbase-client-install hbase-client \
-                  hbase-install \
+                  --hbase-client-install ${hbase_client_dir} \
+                  ${hbase_install_dir} \
                   hadoop-3/bin/hadoop \
                   hadoop-3/share/hadoop/yarn/timelineservice \
                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
diff --git a/dev-support/hbase_nightly_source-artifact.sh b/dev-support/hbase_nightly_source-artifact.sh
index 5d9902e5f04..075e054f0b5 100755
--- a/dev-support/hbase_nightly_source-artifact.sh
+++ b/dev-support/hbase_nightly_source-artifact.sh
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-set -e
 function usage {
   echo "Usage: ${0} [options] /path/to/component/checkout"
   echo ""
@@ -170,20 +169,64 @@ else
   echo "Everything looks as expected."
 fi
 
+function get_hadoop3_version {
+  local version="$1"
+  if [[ "${version}" =~ -SNAPSHOT$ ]]; then
+    echo "${version/-SNAPSHOT/-hadoop3-SNAPSHOT}"
+  else
+    echo "${version}-hadoop3"
+  fi
+}
+
+function build_tarball {
+  local build_hadoop3=$1
+  local mvn_extra_args=""
+  local build_log="srctarball_install.log"
+  local tarball_glob="hbase-*-bin.tar.gz"
+  if [ $build_hadoop3 -ne 0 ]; then
+    local version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+    local hadoop3_version=$(get_hadoop3_version $version)
+    mvn_extra_args="-Drevision=${hadoop3_version} -Dhadoop.profile=3.0"
+    build_log="hadoop3_srctarball_install.log"
+    tarball_glob="hbase-*-hadoop3-*-bin.tar.gz"
+    echo "Follow the ref guide section on making a RC: Step 8 Build the hadoop3 binary tarball."
+  else
+    echo "Follow the ref guide section on making a RC: Step 8 Build the binary tarball."
+  fi
+  if mvn --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" ${mvn_extra_args} clean install \
+    assembly:single >"${working_dir}/${build_log}" 2>&1; then
+    for artifact in "${unpack_dir}"/hbase-assembly/target/${tarball_glob}; do
+      if [ -f "${artifact}" ]; then
+        # TODO check the layout of the binary artifact we just made.
+        echo "Building a binary tarball from the source tarball succeeded."
+        return 0
+      fi
+    done
+  fi
+
+  echo "Building a binary tarball from the source tarball failed. see ${working_dir}/${build_log} for details."
+  # Copy up the rat.txt to the working dir so available in build archive in case rat complaints.
+  # rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go.
+  find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done
+  return 1
+}
+
 cd "${unpack_dir}"
-echo "Follow the ref guide section on making a RC: Step 8 Build the binary 
tarball."
-if mvn --threads=2 -DskipTests -Prelease --batch-mode 
-Dmaven.repo.local="${m2_tarbuild}" clean install \
-    assembly:single >"${working_dir}/srctarball_install.log" 2>&1; then
-  for artifact in "${unpack_dir}"/hbase-assembly/target/hbase-*-bin.tar.gz; do
-    if [ -f "${artifact}" ]; then
-      # TODO check the layout of the binary artifact we just made.
-      echo "Building a binary tarball from the source tarball succeeded."
-      exit 0
-    fi
-  done
+
+build_tarball 0
+if [ $? -ne 0 ]; then
+  exit 1
+fi
+
+mvn help:active-profiles | grep -q hadoop-3.0
+if [ $? -ne 0 ]; then
+  echo "The hadoop-3.0 profile is not activated by default, build a hadoop3 
tarball."
+  # move the previous tarballs out, so it will not be cleaned while building 
against hadoop3
+  mv "${unpack_dir}/hbase-assembly/target/hbase-*-bin.tar.gz" "${unpack_dir}/"
+  build_tarball 1
+  if [ $? -ne 0 ]; then
+    exit 1
+  fi
+  # move tarballs back
+  mv "${unpack_dir}/hbase-*-bin.tar.gz" "${unpack_dir}/hbase-assembly/target/"
 fi
-echo "Building a binary tarball from the source tarball failed. see 
${working_dir}/srctarball_install.log for details."
-# Copy up the rat.txt to the working dir so available in build archive in case 
rat complaints.
-# rat.txt can be under any module target dir... copy them all up renaming them 
to include parent dir as we go.
-find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v 
"$NAME" "${working_dir}/${NAME//\//_}"; done
-exit 1
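
For reference, below is a small standalone sketch of the version mangling done by the new get_hadoop3_version helper. The function is copied verbatim from the patch above; the sample version strings are illustrative only, not taken from an actual build.

    #!/bin/bash
    # Copy of the helper added in hbase_nightly_source-artifact.sh: it appends the
    # hadoop3 qualifier to the project version, keeping -SNAPSHOT as the final suffix.
    function get_hadoop3_version {
      local version="$1"
      if [[ "${version}" =~ -SNAPSHOT$ ]]; then
        echo "${version/-SNAPSHOT/-hadoop3-SNAPSHOT}"
      else
        echo "${version}-hadoop3"
      fi
    }

    get_hadoop3_version "2.6.0"           # prints 2.6.0-hadoop3
    get_hadoop3_version "2.6.1-SNAPSHOT"  # prints 2.6.1-hadoop3-SNAPSHOT

The resulting string is passed to Maven as -Drevision together with -Dhadoop.profile=3.0, so the hadoop3 binary tarballs land under hbase-assembly/target with a -hadoop3- infix, which is what the grep -v hadoop3 / hbase-*-hadoop3-*-bin.tar.gz patterns in the Jenkinsfile changes above key off of.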
