Skip to content

Commit

Permalink
HBASE-28694 Make client integration and packaging test work with java…
Browse files Browse the repository at this point in the history
… 17 (#6035)

Signed-off-by: Xin Sun <sunxin@apache.org>
(cherry picked from commit c722dde)
  • Loading branch information
Apache9 committed Jun 29, 2024
1 parent 77879ac commit 5ed037d
Show file tree
Hide file tree
Showing 3 changed files with 54 additions and 30 deletions.
50 changes: 32 additions & 18 deletions dev-support/Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -758,11 +758,6 @@ pipeline {
label 'hbase-large'
}
}
tools {
maven 'maven_latest'
// this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
jdk "jdk_1.8_latest"
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
BRANCH = "${env.BRANCH_NAME}"
Expand Down Expand Up @@ -797,21 +792,25 @@ pipeline {
echo "got the following saved stats in 'output-srctarball/machine'"
ls -lh "output-srctarball/machine"
'''
sh """#!/bin/bash -e
sh '''#!/bin/bash -e
echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" --workdir=/hbase hbase-integration-test \
"component/dev-support/hbase_nightly_source-artifact.sh" \
--intermediate-file-dir output-srctarball \
--unpack-temp-dir unpacked_src_tarball \
--maven-m2-initial .m2-for-repo \
--maven-m2-src-build .m2-for-src \
--clean-source-checkout \
"${env.BASEDIR}" ; then
component
if [ $? -eq 0 ]; then
echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
else
echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
exit 1
fi
"""
'''
echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
sh '''#!/bin/bash -e
if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v hadoop3 | wc -l) ]; then
Expand All @@ -834,21 +833,25 @@ pipeline {
'''
unstash 'hadoop-2'
sh '''#!/bin/bash -xe
if [[ "${BRANCH}" = branch-2* ]]; then
if [[ "${BRANCH}" == *"branch-2"* ]]; then
echo "Attempting to use run an instance on top of Hadoop 2."
artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-8" --workdir=/hbase hbase-integration-test \
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
--single-process \
--working-dir output-integration/hadoop-2 \
--hbase-client-install "hbase-client" \
"hbase-install" \
"hadoop-2/bin/hadoop" \
hbase-install \
hadoop-2/bin/hadoop \
hadoop-2/share/hadoop/yarn/timelineservice \
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-2/bin/mapred \
>output-integration/hadoop-2.log 2>&1 ; then
>output-integration/hadoop-2.log 2>&1
if [ $? -ne 0 ]; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
exit 2
fi
Expand All @@ -870,7 +873,12 @@ pipeline {
hbase_install_dir="hbase-hadoop3-install"
hbase_client_dir="hbase-hadoop3-client"
fi
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
-e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
--workdir=/hbase hbase-integration-test \
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
--single-process \
--working-dir output-integration/hadoop-3 \
--hbase-client-install ${hbase_client_dir} \
Expand All @@ -880,12 +888,17 @@ pipeline {
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
>output-integration/hadoop-3.log 2>&1 ; then
>output-integration/hadoop-3.log 2>&1
if [ $? -ne 0 ]; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
exit 2
fi
echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
-e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
--workdir=/hbase hbase-integration-test \
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
--single-process \
--hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
--working-dir output-integration/hadoop-3-shaded \
Expand All @@ -896,7 +909,8 @@ pipeline {
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
>output-integration/hadoop-3-shaded.log 2>&1 ; then
>output-integration/hadoop-3-shaded.log 2>&1
if [ $? -ne 0 ]; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
exit 2
fi
Expand Down
4 changes: 2 additions & 2 deletions dev-support/hbase_nightly_pseudo-distributed-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -509,11 +509,11 @@ public class HBaseClientReadWriteExample {
}
EOF
redirect_and_run "${working_dir}/hbase-shaded-client-compile" \
javac -cp "${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hadoop_jars}" "${working_dir}/HBaseClientReadWriteExample.java"
$JAVA_HOME/bin/javac -cp "${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hadoop_jars}" "${working_dir}/HBaseClientReadWriteExample.java"
echo "Running shaded client example. It'll fetch the set of regions, round-trip them to a file in HDFS, then write them one-per-row into the test table."
# The order of classpath entries here is important. if we're using non-shaded Hadoop 3 / 2.9.0 jars, we have to work around YARN-2190.
redirect_and_run "${working_dir}/hbase-shaded-client-example" \
java -cp "${working_dir}/hbase-conf/:${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hbase_dep_classpath}:${working_dir}:${hadoop_jars}" HBaseClientReadWriteExample
$JAVA_HOME/bin/java -cp "${working_dir}/hbase-conf/:${hbase_client}/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${hbase_version}.jar:${hbase_dep_classpath}:${working_dir}:${hadoop_jars}" HBaseClientReadWriteExample

echo "Checking on results of example program."
"${hadoop_exec}" --config "${working_dir}/hbase-conf/" fs -copyToLocal "example-region-listing.data" "${working_dir}/example-region-listing.data"
Expand Down
30 changes: 20 additions & 10 deletions dev-support/hbase_nightly_source-artifact.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,11 @@ function usage {
echo " a git checkout, including ignored files."
exit 1
}

# Resolve the Maven executable to use for the rest of this script.
# Prefer `mvn` from PATH; if it is not installed (e.g. inside the
# integration-test docker image, which only ships Maven under
# MAVEN_HOME), fall back to "$MAVEN_HOME/bin/mvn".
# NOTE(review): quoted so a MAVEN_HOME containing spaces still works;
# if neither mvn nor MAVEN_HOME is available this resolves to
# "/bin/mvn" and later invocations will fail loudly — same behavior
# as before, just safely quoted.
MVN="mvn"
if ! command -v mvn &>/dev/null; then
  MVN="${MAVEN_HOME}/bin/mvn"
fi
# if no args specified, show usage
if [ $# -lt 1 ]; then
usage
Expand Down Expand Up @@ -124,7 +129,7 @@ fi
# See http://hbase.apache.org/book.html#maven.release

echo "Maven details, in case our JDK doesn't match expectations:"
mvn --version --offline | tee "${working_dir}/maven_version"
${MVN} --version --offline | tee "${working_dir}/maven_version"

echo "Do a clean building of the source artifact using code in ${component_dir}"
cd "${component_dir}"
Expand Down Expand Up @@ -184,16 +189,16 @@ function build_tarball {
local build_log="srctarball_install.log"
local tarball_glob="hbase-*-bin.tar.gz"
if [ $build_hadoop3 -ne 0 ]; then
local version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
local version=$(${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:evaluate -Dexpression=project.version -q -DforceStdout)
local hadoop3_version=$(get_hadoop3_version $version)
mvn_extra_args="-Drevision=${hadoop3_version} -Dhadoop.profile=3.0"
build_log="hadoop3_srctarball_install.log"
tarball_glob="hbase-*-hadoop3-*-bin.tar.gz"
echo "Follow the ref guide section on making a RC: Step 8 Build the hadoop3 binary tarball."
else
echo "Follow the ref guide section on making a RC: Step 8 Build the binary tarball."
echo "Follow the ref guide section on making a RC: Step 7 Build the binary tarball."
fi
if mvn --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" ${mvn_extra_args} clean install \
if ${MVN} --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" ${mvn_extra_args} clean install \
assembly:single >"${working_dir}/${build_log}" 2>&1; then
for artifact in "${unpack_dir}"/hbase-assembly/target/${tarball_glob}; do
if [ -f "${artifact}" ]; then
Expand All @@ -213,20 +218,25 @@ function build_tarball {

cd "${unpack_dir}"

build_tarball 0
${MVN} -Dmaven.repo.local="${m2_tarbuild}" help:active-profiles | grep -q hadoop-3.0
if [ $? -ne 0 ]; then
exit 1
fi
echo "The hadoop-3.0 profile is not activated by default, build a default tarball first."
# use java 8 to build with hadoop2
JAVA_HOME="/usr/lib/jvm/java-8" build_tarball 0
if [ $? -ne 0 ]; then
exit 1
fi

mvn help:active-profiles | grep -q hadoop-3.0
if [ $? -ne 0 ]; then
echo "The hadoop-3.0 profile is not activated by default, build a hadoop3 tarball."
# move the previous tarballs out, so it will not be cleaned while building against hadoop3
mv "${unpack_dir}"/hbase-assembly/target/hbase-*-bin.tar.gz "${unpack_dir}"/
echo "build a hadoop3 tarball."
build_tarball 1
if [ $? -ne 0 ]; then
exit 1
fi
# move tarballs back
mv "${unpack_dir}"/hbase-*-bin.tar.gz "${unpack_dir}"/hbase-assembly/target/
else
echo "The hadoop-3.0 profile is activated by default, build a default tarball."
build_tarball 0
fi

0 comments on commit 5ed037d

Please sign in to comment.