This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit c9c7fed08918e728631dc9a9e3192b315b7b8155
Author: Duo Zhang <zhang...@apache.org>
AuthorDate: Sat Jun 29 21:36:57 2024 +0800

    HBASE-28694 Make client integration and packaging test work with java 17 (#6035)
    
    Signed-off-by: Xin Sun <sun...@apache.org>
    (cherry picked from commit c722dde59fcfc8b5e176e8e92923d759bd761163)
---
 dev-support/Jenkinsfile                            | 50 ++++++++++++++--------
 .../hbase_nightly_pseudo-distributed-test.sh       |  4 +-
 dev-support/hbase_nightly_source-artifact.sh       | 30 ++++++++-----
 3 files changed, 54 insertions(+), 30 deletions(-)
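
Summary of the approach: instead of taking a JDK and Maven from Jenkins
'tools' blocks, each nightly step now builds the dev-support Docker image and
runs its script inside the container, pinning JAVA_HOME to whichever JDK that
step needs. A minimal sketch of the pattern, assembled from the Jenkinsfile
changes below (it assumes a workspace laid out like the nightly job's, with
the source checkout under 'component' and BASEDIR/WORKSPACE set accordingly):

    #!/bin/bash -e
    # Build the image from the in-tree Dockerfile (tag name as used below).
    docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
    # Run the nightly script inside the container as the calling host user,
    # selecting the JDK via JAVA_HOME instead of a Jenkins 'tools' block.
    docker run --rm -v "${WORKSPACE}":/hbase \
        -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
        -u "$(id -u)":"$(id -g)" -e JAVA_HOME="/usr/lib/jvm/java-17" \
        --workdir=/hbase hbase-integration-test \
        component/dev-support/hbase_nightly_source-artifact.sh \
        --intermediate-file-dir output-srctarball \
        --unpack-temp-dir unpacked_src_tarball \
        --maven-m2-initial .m2-for-repo \
        --maven-m2-src-build .m2-for-src \
        --clean-source-checkout \
        component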

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 0a47726e53c..94652560614 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -758,11 +758,6 @@ pipeline {
               label 'hbase-large'
             }
           }
-          tools {
-            maven 'maven_latest'
-            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
-            jdk "jdk_1.8_latest"
-          }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
             BRANCH = "${env.BRANCH_NAME}"
@@ -797,21 +792,25 @@ pipeline {
               echo "got the following saved stats in 
'output-srctarball/machine'"
               ls -lh "output-srctarball/machine"
             '''
-            sh """#!/bin/bash -e
+            sh '''#!/bin/bash -e
               echo "Checking the steps for an RM to make a source artifact, 
then a binary artifact."
-              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" 
\
+              docker build -t hbase-integration-test -f 
"${BASEDIR}/dev-support/docker/Dockerfile" .
+              docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" 
--workdir=/hbase hbase-integration-test \
+                  "component/dev-support/hbase_nightly_source-artifact.sh" \
                   --intermediate-file-dir output-srctarball \
                   --unpack-temp-dir unpacked_src_tarball \
                   --maven-m2-initial .m2-for-repo \
                   --maven-m2-src-build .m2-for-src \
                   --clean-source-checkout \
-                  "${env.BASEDIR}" ; then
+                  component
+              if [ $? -eq 0 ]; then
                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
              else
                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
                 exit 1
               fi
-            """
+            '''
             echo "unpacking the hbase bin tarball into 'hbase-install' and the 
client tarball into 'hbase-client'"
             sh '''#!/bin/bash -e
               if [ 2 -ne $(ls -1 
"${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | 
grep -v hadoop3 | wc -l) ]; then
@@ -834,21 +833,25 @@ pipeline {
             '''
             unstash 'hadoop-2'
             sh '''#!/bin/bash -xe
-              if [[ "${BRANCH}" = branch-2* ]]; then
+              if [[ "${BRANCH}" == *"branch-2"* ]]; then
                 echo "Attempting to use run an instance on top of Hadoop 2."
                 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
                 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
-                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+                docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
+                docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                    -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-8" --workdir=/hbase hbase-integration-test \
+                    component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                     --single-process \
                     --working-dir output-integration/hadoop-2 \
                     --hbase-client-install "hbase-client" \
-                    "hbase-install" \
-                    "hadoop-2/bin/hadoop" \
+                    hbase-install \
+                    hadoop-2/bin/hadoop \
                    hadoop-2/share/hadoop/yarn/timelineservice \
                    hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                    hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-2/bin/mapred \
-                    >output-integration/hadoop-2.log 2>&1 ; then
+                    >output-integration/hadoop-2.log 2>&1
+                if [ $? -ne 0 ]; then
                   echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 2. [see log 
for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that 
this means we didn't run on Hadoop 3)" >output-integration/commentfile
                   exit 2
                 fi
@@ -870,7 +873,12 @@ pipeline {
                 hbase_install_dir="hbase-hadoop3-install"
                 hbase_client_dir="hbase-hadoop3-client"
               fi
-              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+              docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
+              docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
+                  -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
+                  --workdir=/hbase hbase-integration-test \
+                  component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                   --single-process \
                   --working-dir output-integration/hadoop-3 \
                   --hbase-client-install ${hbase_client_dir} \
@@ -880,12 +888,17 @@ pipeline {
                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                   hadoop-3/bin/mapred \
-                  >output-integration/hadoop-3.log 2>&1 ; then
+                  >output-integration/hadoop-3.log 2>&1
+              if [ $? -ne 0 ]; then
                 echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 3. [see log 
for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that 
this means we didn't check the Hadoop 3 shaded client)" 
>output-integration/commentfile
                 exit 2
               fi
               echo "Attempting to use run an instance on top of Hadoop 3, 
relying on the Hadoop client artifacts for the example client program."
-              if ! 
"${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+              docker run --rm -v "${WORKSPACE}":/hbase -v 
/etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
+                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
+                  -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" 
\
+                  --workdir=/hbase hbase-integration-test \
+                  
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                   --single-process \
                  --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
                   --working-dir output-integration/hadoop-3-shaded \
@@ -896,7 +909,8 @@ pipeline {
                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                   hadoop-3/bin/mapred \
-                  >output-integration/hadoop-3-shaded.log 2>&1 ; then
+                  >output-integration/hadoop-3-shaded.log 2>&1
+              if [ $? -ne 0 ]; then
                 echo "(x) {color:red}-1 client integration 
test{color}\n--Failed when running client tests on top of Hadoop 3 using 
Hadoop's shaded client. [see log for 
details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." 
>output-integration/commentfile
                 exit 2
               fi
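
A note on the Hadoop 3 runs above: they also inject HADOOP_OPTS with an
--add-opens flag because, from JDK 9 on, module encapsulation blocks the
reflective access into java.base that Hadoop still performs, so on Java 17
the package has to be opened explicitly. Hadoop's launcher scripts append
HADOOP_OPTS to the JVM arguments, so an environment-level export is enough.
A small illustration (the 'version' subcommand is just a cheap way to see
the plumbing; the hadoop-3 path mirrors the Jenkinsfile layout):

    # Any JVM started through the hadoop/mapred wrappers picks up HADOOP_OPTS.
    export JAVA_HOME="/usr/lib/jvm/java-17"
    export HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED"
    hadoop-3/bin/hadoop version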
diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index 501b4833857..9292222cf52 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -509,11 +509,11 @@ public class HBaseClientReadWriteExample {
 }
 EOF
 redirect_and_run "${working_dir}/hbase-shaded-client-compile" \
-    javac -cp "${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hadoop_jars}" "${working_dir}/HBaseClientReadWriteExample.java"
+    $JAVA_HOME/bin/javac -cp "${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hadoop_jars}" "${working_dir}/HBaseClientReadWriteExample.java"
 echo "Running shaded client example. It'll fetch the set of regions, 
round-trip them to a file in HDFS, then write them one-per-row into the test 
table."
 # The order of classpath entries here is important. if we're using non-shaded 
Hadoop 3 / 2.9.0 jars, we have to work around YARN-2190.
 redirect_and_run "${working_dir}/hbase-shaded-client-example" \
-    java -cp "${working_dir}/hbase-conf/:${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hbase_dep_classpath}:${working_dir}:${hadoop_jars}" HBaseClientReadWriteExample
+    $JAVA_HOME/bin/java -cp "${working_dir}/hbase-conf/:${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hbase_dep_classpath}:${working_dir}:${hadoop_jars}" HBaseClientReadWriteExample
 
 echo "Checking on results of example program."
 "${hadoop_exec}" --config "${working_dir}/hbase-conf/" fs -copyToLocal 
"example-region-listing.data" "${working_dir}/example-region-listing.data"
diff --git a/dev-support/hbase_nightly_source-artifact.sh b/dev-support/hbase_nightly_source-artifact.sh
index 410406a1516..372c39b8f88 100755
--- a/dev-support/hbase_nightly_source-artifact.sh
+++ b/dev-support/hbase_nightly_source-artifact.sh
@@ -33,6 +33,11 @@ function usage {
   echo "                                          a git checkout, including 
ignored files."
   exit 1
 }
+
+MVN="mvn"
+if ! command -v mvn &>/dev/null; then
+  MVN=$MAVEN_HOME/bin/mvn
+fi
 # if no args specified, show usage
 if [ $# -lt 1 ]; then
   usage
@@ -124,7 +129,7 @@ fi
 # See http://hbase.apache.org/book.html#maven.release
 
 echo "Maven details, in case our JDK doesn't match expectations:"
-mvn --version --offline | tee "${working_dir}/maven_version"
+${MVN} --version --offline | tee "${working_dir}/maven_version"
 
 echo "Do a clean building of the source artifact using code in 
${component_dir}"
 cd "${component_dir}"
@@ -184,16 +189,16 @@ function build_tarball {
   local build_log="srctarball_install.log"
   local tarball_glob="hbase-*-bin.tar.gz"
   if [ $build_hadoop3 -ne 0 ]; then
-    local version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+    local version=$(${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:evaluate -Dexpression=project.version -q -DforceStdout)
     local hadoop3_version=$(get_hadoop3_version $version)
     mvn_extra_args="-Drevision=${hadoop3_version} -Dhadoop.profile=3.0"
     build_log="hadoop3_srctarball_install.log"
     tarball_glob="hbase-*-hadoop3-*-bin.tar.gz"
     echo "Follow the ref guide section on making a RC: Step 8 Build the 
hadoop3 binary tarball."
   else
-    echo "Follow the ref guide section on making a RC: Step 8 Build the binary 
tarball."
+    echo "Follow the ref guide section on making a RC: Step 7 Build the binary 
tarball."
   fi
-  if mvn --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" ${mvn_extra_args} clean install \
+  if ${MVN} --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" ${mvn_extra_args} clean install \
     assembly:single >"${working_dir}/${build_log}" 2>&1; then
     for artifact in "${unpack_dir}"/hbase-assembly/target/${tarball_glob}; do
       if [ -f "${artifact}" ]; then
@@ -213,20 +218,25 @@ function build_tarball {
 
 cd "${unpack_dir}"
 
-build_tarball 0
+${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:active-profiles | grep -q hadoop-3.0
 if [ $? -ne 0 ]; then
-  exit 1
-fi
+  echo "The hadoop-3.0 profile is not activated by default, build a default 
tarball first."
+  # use java 8 to build with hadoop2
+  JAVA_HOME="/usr/lib/jvm/java-8" build_tarball 0
+  if [ $? -ne 0 ]; then
+    exit 1
+  fi
 
-mvn help:active-profiles | grep -q hadoop-3.0
-if [ $? -ne 0 ]; then
-  echo "The hadoop-3.0 profile is not activated by default, build a hadoop3 
tarball."
   # move the previous tarballs out, so it will not be cleaned while building against hadoop3
   mv "${unpack_dir}"/hbase-assembly/target/hbase-*-bin.tar.gz "${unpack_dir}"/
+  echo "build a hadoop3 tarball."
   build_tarball 1
   if [ $? -ne 0 ]; then
     exit 1
   fi
   # move tarballs back
   mv "${unpack_dir}"/hbase-*-bin.tar.gz "${unpack_dir}"/hbase-assembly/target/
+else
+  echo "The hadoop-3.0 profile is activated by default, build a default 
tarball."
+  build_tarball 0
 fi
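
Two small patterns in this last script are easy to miss: Maven is resolved
from PATH with a ${MAVEN_HOME}/bin/mvn fallback (the Jenkins 'tools' block
that used to provide it is gone), and the default-profile (hadoop2) tarball
is built under Java 8 by prefixing a single call with JAVA_HOME=..., which
in bash scopes the override to that one invocation. A minimal sketch (values
mirror the diff; run_build is a hypothetical stand-in for build_tarball):

    #!/bin/bash
    # Resolve Maven: prefer PATH, else fall back to MAVEN_HOME, as the script does.
    MVN="mvn"
    if ! command -v mvn &>/dev/null; then
      MVN="${MAVEN_HOME}/bin/mvn"
    fi

    run_build() {  # hypothetical stand-in for build_tarball
      "${MVN}" --version --offline
    }

    # The assignment prefix lasts only for this call; JAVA_HOME in the
    # surrounding shell is unchanged afterwards.
    JAVA_HOME="/usr/lib/jvm/java-8" run_build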
