Commit a8e3629

HBASE-24802 make a drop-in compatible impl of htrace APIs that does not do anything
closes #36
Signed-off-by: Duo Zhang <[email protected]>
1 parent d0a3f39 commit a8e3629

28 files changed: +2239 −16 lines
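
The new module, hbase-noop-htrace, produces a jar that presents the htrace API surface but does nothing when called, so it can be dropped into an existing HBase or Hadoop install in place of the real htrace jars. Boiled down to a manual sketch (the tarball path and the /tmp target directory below are placeholders, not part of the commit), the flow that the new CI stages below automate looks roughly like this:

    # from a checkout of this repo: build the replacement artifact
    mvn -DskipTests -pl hbase-noop-htrace clean package
    noop_jar="$(ls -1 hbase-noop-htrace/target/hbase-noop-htrace-*.jar | head -n 1)"
    # unpack a release tarball with its htrace jars swapped for the no-op jar
    mkdir -p /tmp/hbase-2.3.3
    ./dev-support/jenkins/swap_htrace_jar.sh /tmp/hbase-2.3.3 /path/to/hbase-2.3.3-bin.tar.gz "${noop_jar}"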

dev-support/jenkins/Jenkinsfile

+221 −16
@@ -30,6 +30,20 @@ pipeline {
     skipDefaultCheckout()
   }
 
+  parameters {
+    booleanParam(name: 'DEBUG',
+      defaultValue: false,
+      description: 'Print extra outputs for debugging the jenkins job and yetus')
+    // the hbase and hadoop versions listed here need to match the matrix axes in the test
+    // section. it's not currently possible to reuse a single array for both purposes.
+    choice(name: 'HBASE_VERSION',
+      choices: ['all', '2.2.6', '2.3.3'],
+      description: 'HBase releases to test. default is everything in the list.')
+    choice(name: 'HADOOP_VERSION',
+      choices: ['all', '3.2.1', '2.10.0'],
+      description: 'Hadoop versions to run each hbase version on. default is everything in the list.')
+  }
+
   environment {
     SRC_REL = 'src'
     PATCH_REL = 'output'
@@ -39,26 +53,26 @@ pipeline {
     DOCKERFILE_REL = "${SRC_REL}/dev-support/jenkins/Dockerfile"
     YETUS_DRIVER_REL = "${SRC_REL}/dev-support/jenkins/jenkins_precommit_github_yetus.sh"
     ARCHIVE_PATTERN_LIST = '*.dump'
-    BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
     SET_JAVA_HOME = '/usr/local/openjdk-8'
-    WORKDIR_REL = 'yetus-precommit-check'
-    WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
-    SOURCEDIR = "${WORKDIR}/${SRC_REL}"
-    PATCHDIR = "${WORKDIR}/${PATCH_REL}"
-    DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
-    YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
-    YETUSDIR = "${WORKDIR}/${YETUS_REL}"
     PLUGINS = 'all'
   }
 
-  parameters {
-    booleanParam(name: 'DEBUG',
-      defaultValue: false,
-      description: 'Print extra outputs for debugging the jenkins job and yetus')
-  }
 
   stages {
     stage ('precommit checks') {
+      when {
+        changeRequest()
+      }
+      environment {
+        WORKDIR_REL = 'yetus-precommit-check'
+        WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
+        SOURCEDIR = "${WORKDIR}/${SRC_REL}"
+        PATCHDIR = "${WORKDIR}/${PATCH_REL}"
+        DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
+        YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
+        YETUSDIR = "${WORKDIR}/${YETUS_REL}"
+        BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
+      }
       steps {
         dir("${SOURCEDIR}") {
           checkout scm
@@ -124,18 +138,209 @@ pipeline {
         }
       }
     }
+    stage ('noop htrace drop in') {
+      when {
+        anyOf {
+          changeset "hbase-noop-htrace/**"
+          changeset "dev-support/jenkins/Jenkinsfile"
+        }
+      }
+      tools {
+        // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
+        jdk "jdk_1.8_latest"
+      }
+      stages {
+        stage ('setup') {
+          tools {
+            maven 'maven_latest'
+          }
+          environment {
+            WORKDIR = "${WORKSPACE}/htrace-noop"
+            CACHE_DIR = "${WORKSPACE}/cache"
+          }
+          steps {
+            dir ("htrace-noop") {
+              dir ("component") {
+                echo 'Build the htrace replacement artifact.'
+                checkout scm
+                sh 'mvn -DskipTests -pl hbase-noop-htrace clean package'
+              }
+              dir ("tools") {
+                echo 'Downloading enabling scripts from main hbase repo.'
+                sh '''#!/usr/bin/env bash
+                  set -oe
+                  declare script
+                  declare -a needed_files
+                  needed_files=( \
+                    hbase_nightly_pseudo-distributed-test.sh \
+                    jenkins-scripts/cache-apache-project-artifact.sh \
+                  )
+                  for script in "${needed_files[@]}"; do
+                    curl -L -O https://raw.githubusercontent.com/apache/hbase/HEAD/dev-support/"${script}"
+                    chmod +x "$(basename "${script}")"
+                  done
+                '''
+              }
+              stash name: 'scripts', includes: "tools/hbase_nightly_pseudo-distributed-test.sh"
+              dir ("hbase") {
+                script {
+                  def hbase_versions = [ params.HBASE_VERSION ]
+                  if (params.HBASE_VERSION == 'all') {
+                    // this set needs to match the matrix axes below
+                    hbase_versions = [ '2.2.6', '2.3.3' ]
+                  }
+                  hbase_versions.each {
+                    def hbase_version = it
+                    sh """#!/usr/bin/env bash
+                      set -e
+                      set -x
+                      mkdir -p "downloads/hbase-${hbase_version}"
+                      mkdir -p "${CACHE_DIR}"
+                      echo 'downloading hbase version ${hbase_version}'
+                      '${WORKDIR}/tools/cache-apache-project-artifact.sh' \
+                          --working-dir '${WORKDIR}/hbase/downloads/hbase-${hbase_version}' \
+                          --keys 'https://downloads.apache.org/hbase/KEYS' \
+                          '${CACHE_DIR}/hbase-${hbase_version}-bin.tar.gz' \
+                          'hbase/${hbase_version}/hbase-${hbase_version}-bin.tar.gz'
+                      mkdir 'hbase-${hbase_version}'
+                      declare noop_htrace
+                      noop_htrace="\$(ls -1 '${WORKDIR}/component/hbase-noop-htrace/target/'hbase-noop-htrace-*.jar | head -n 1)"
+                      if [ -z "\${noop_htrace}" ]; then
+                        echo "failed to find htrace noop replacement. did building step work?" >&2
+                        exit 1
+                      fi
+                      '${WORKDIR}/component/dev-support/jenkins/swap_htrace_jar.sh' '${WORKDIR}/hbase/hbase-${hbase_version}' '${CACHE_DIR}/hbase-${hbase_version}-bin.tar.gz' "\${noop_htrace}"
+                    """
+                    stash name: "hbase-${hbase_version}", includes: "hbase-${hbase_version}/**"
+                  }
+                }
+              }
+              dir ("hadoop") {
+                script {
+                  def hadoop_versions = [ params.HADOOP_VERSION ]
+                  if (params.HADOOP_VERSION == 'all') {
+                    // this set needs to match the matrix axes below
+                    hadoop_versions = [ '3.2.1', '2.10.0']
+                  }
+                  hadoop_versions.each {
+                    def hadoop_version = it
+                    sh """#!/usr/bin/env bash
+                      set -e
+                      set -x
+                      mkdir -p "downloads/hadoop-${hadoop_version}"
+                      mkdir -p "${CACHE_DIR}"
+                      echo "downloading hadoop version ${hadoop_version}"
+                      "${WORKDIR}/tools/cache-apache-project-artifact.sh" \
+                          --working-dir "${WORKDIR}/hadoop/downloads/hadoop-${hadoop_version}" \
+                          --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
+                          "${CACHE_DIR}/hadoop-${hadoop_version}-bin.tar.gz" \
+                          "hadoop/common/hadoop-${hadoop_version}/hadoop-${hadoop_version}.tar.gz"
+                      mkdir "hadoop-${hadoop_version}"
+                      declare noop_htrace
+                      noop_htrace="\$(ls -1 "${WORKDIR}"/component/hbase-noop-htrace/target/hbase-noop-htrace-*.jar | head -n 1)"
+                      if [ -z "\${noop_htrace}" ]; then
+                        echo "failed to find htrace noop replacement. did building step work?" >&2
+                        exit 1
+                      fi
+                      '${WORKDIR}/component/dev-support/jenkins/swap_htrace_jar.sh' "${WORKDIR}/hadoop/hadoop-${hadoop_version}" "${CACHE_DIR}/hadoop-${hadoop_version}-bin.tar.gz" "\${noop_htrace}"
+                    """
+                    stash name: "hadoop-${hadoop_version}", includes: "hadoop-${hadoop_version}/**"
+                  }
+                }
+              }
+            }
+          }
+          post {
+            cleanup {
+              // clean up the working area but don't delete the download cache
+              dir ("htrace-noop") {
+                deleteDir()
+              }
+            }
+          }
+        }
+        stage ("test htrace drop in replacement") {
+          matrix {
+            agent {
+              label 'hbase'
+            }
+            axes {
+              axis {
+                name 'HBASE'
+                values '2.2.6', '2.3.3'
+              }
+              axis {
+                name 'HADOOP'
+                values '3.2.1', '2.10.0'
+              }
+            }
+            when {
+              allOf {
+                anyOf {
+                  expression { params.HBASE_VERSION == 'all' }
+                  expression { params.HBASE_VERSION == env.HBASE }
+                }
+                anyOf {
+                  expression { params.HADOOP_VERSION == 'all' }
+                  expression { params.HADOOP_VERSION == env.HADOOP }
+                }
+              }
+            }
+            stages {
+              stage ("test a specific hbase on a specific hadoop") {
+                steps {
+                  unstash 'scripts'
+                  unstash "hbase-${env.HBASE}"
+                  unstash "hadoop-${env.HADOOP}"
+                  sh '''#!/usr/bin/env bash
+                    set -eo
+                    set -x
+                    mkdir -p "hbase-${HBASE}.hadoop-${HADOOP}"
+                    ./tools/hbase_nightly_pseudo-distributed-test.sh \
+                        --single-process \
+                        --working-dir "hbase-${HBASE}.hadoop-${HADOOP}" \
+                        "hbase-${HBASE}" \
+                        "hadoop-${HADOOP}/bin/hadoop" \
+                        "hadoop-${HADOOP}"/share/hadoop/yarn/timelineservice \
+                        "hadoop-${HADOOP}"/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                        "hadoop-${HADOOP}"/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                        "hadoop-${HADOOP}"/bin/mapred \
+                  '''
+                }
+                post {
+                  failure {
+                    sh '''#!/usr/bin/env bash
+                      set -eo
+                      set -x
+                      find "hbase-${HBASE}.hadoop-${HADOOP}" \
+                          "hbase-${HBASE}" "hadoop-${HADOOP}" \
+                          -type d -name logs | \
+                        xargs zip -r "hbase-${HBASE}.hadoop-${HADOOP}.logs.zip"
+                      '''
+                    archiveArtifacts artifacts: "hbase-${env.HBASE}.hadoop-${env.HADOOP}.logs.zip"
+                  }
+                  cleanup {
+                    deleteDir()
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
   }
 
   post {
     // Jenkins pipeline jobs fill slaves on PRs without this :(
-    cleanup() {
+    cleanup {
       script {
         sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
           # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
-        '''
-        deleteDir()
+          '''
       }
+      // we purposefully don't do a top level workspace cleanup so that we can reuse downloads
     }
   }
 }
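
For reference, each cell of the matrix stage above reduces to a single run of the main repo's hbase_nightly_pseudo-distributed-test.sh against one swapped HBase tree and one swapped Hadoop tree. With HBASE=2.3.3 and HADOOP=3.2.1 substituted as an example (a sketch only; it assumes the stashed hbase-2.3.3/ and hadoop-3.2.1/ directories from the setup stage are present in the workspace), the invocation is roughly:

    mkdir -p "hbase-2.3.3.hadoop-3.2.1"
    ./tools/hbase_nightly_pseudo-distributed-test.sh \
        --single-process \
        --working-dir "hbase-2.3.3.hadoop-3.2.1" \
        "hbase-2.3.3" \
        "hadoop-3.2.1/bin/hadoop" \
        hadoop-3.2.1/share/hadoop/yarn/timelineservice \
        hadoop-3.2.1/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
        hadoop-3.2.1/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
        hadoop-3.2.1/bin/mapred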
dev-support/jenkins/swap_htrace_jar.sh (new file)

+34
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Call it like ./swap_htrace_jar.sh /some/place/to/target /a/path/to/component.tar.gz /a/path/to/hbase-noop-htrace.jar
+set -e
+if [ -n "${DEBUG}" ]; then
+  set -x
+fi
+declare destination="$1"
+declare tarball="$2"
+declare noop_htrace="$3"
+echo "unpack the tarball, but skip htrace artifacts."
+tar -xzf "${tarball}" --strip-components=1 --exclude 'htrace*.jar' -C "${destination}"
+echo "insert a copy of our replacement artifact for htrace."
+for htrace_location in $(tar -tzf "${tarball}" | grep -E 'htrace.*jar' ); do
+  htrace_location="$(dirname "${htrace_location#*/}")"
+  echo " placing htrace jar at ${htrace_location}"
+  cp "${noop_htrace}" "${destination}/${htrace_location}"
+done;
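
A quick sanity check after running the script (a sketch, reusing the hypothetical /tmp/hbase-2.3.3 target directory from earlier): the unpacked tree should contain no real htrace jars, and a copy of the replacement jar should sit at every path where an htrace jar lived in the original tarball.

    find /tmp/hbase-2.3.3 -name 'htrace-core*.jar'         # expect no output
    find /tmp/hbase-2.3.3 -name 'hbase-noop-htrace-*.jar'  # expect one copy per original htrace location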
