@@ -30,6 +30,20 @@ pipeline {
     skipDefaultCheckout()
   }
 
+  parameters {
+    booleanParam(name: 'DEBUG',
+      defaultValue: false,
+      description: 'Print extra outputs for debugging the jenkins job and yetus')
+    // the hbase and hadoop versions listed here need to match the matrix axes in the test
+    // section. it's not currently possible to reuse a single array for both purposes.
+    choice(name: 'HBASE_VERSION',
+      choices: ['all', '2.2.6', '2.3.3'],
+      description: 'HBase releases to test. default is everything in the list.')
+    choice(name: 'HADOOP_VERSION',
+      choices: ['all', '3.2.1', '2.10.0'],
+      description: 'Hadoop versions to run each hbase version on. default is everything in the list.')
+  }
+
   environment {
     SRC_REL = 'src'
     PATCH_REL = 'output'
@@ -39,26 +53,26 @@ pipeline {
     DOCKERFILE_REL = "${SRC_REL}/dev-support/jenkins/Dockerfile"
     YETUS_DRIVER_REL = "${SRC_REL}/dev-support/jenkins/jenkins_precommit_github_yetus.sh"
     ARCHIVE_PATTERN_LIST = '*.dump'
-    BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
     SET_JAVA_HOME = '/usr/local/openjdk-8'
-    WORKDIR_REL = 'yetus-precommit-check'
-    WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
-    SOURCEDIR = "${WORKDIR}/${SRC_REL}"
-    PATCHDIR = "${WORKDIR}/${PATCH_REL}"
-    DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
-    YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
-    YETUSDIR = "${WORKDIR}/${YETUS_REL}"
     PLUGINS = 'all'
   }
 
-  parameters {
-    booleanParam(name: 'DEBUG',
-      defaultValue: false,
-      description: 'Print extra outputs for debugging the jenkins job and yetus')
-  }
 
   stages {
     stage ('precommit checks') {
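+      // changeRequest() limits this stage to builds of pull requests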
+      when {
+        changeRequest()
+      }
+      environment {
+        WORKDIR_REL = 'yetus-precommit-check'
+        WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
+        SOURCEDIR = "${WORKDIR}/${SRC_REL}"
+        PATCHDIR = "${WORKDIR}/${PATCH_REL}"
+        DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
+        YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
+        YETUSDIR = "${WORKDIR}/${YETUS_REL}"
+        BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
+      }
       steps {
         dir("${SOURCEDIR}") {
           checkout scm
@@ -124,18 +138,209 @@ pipeline {
         }
       }
     }
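+    // build the noop htrace artifact, then stage hbase and hadoop releases with the
+    // bundled htrace jar swapped out, so the matrix stage below can test each combination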
+    stage ('noop htrace drop in') {
+      when {
+        anyOf {
+          changeset "hbase-noop-htrace/**"
+          changeset "dev-support/jenkins/Jenkinsfile"
+        }
+      }
+      tools {
+        // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
+        jdk "jdk_1.8_latest"
+      }
+      stages {
+        stage ('setup') {
+          tools {
+            maven 'maven_latest'
+          }
+          environment {
+            WORKDIR = "${WORKSPACE}/htrace-noop"
+            CACHE_DIR = "${WORKSPACE}/cache"
+          }
+          steps {
+            dir ("htrace-noop") {
+              dir ("component") {
+                echo 'Build the htrace replacement artifact.'
+                checkout scm
+                sh 'mvn -DskipTests -pl hbase-noop-htrace clean package'
+              }
+              dir ("tools") {
+                echo 'Downloading enabling scripts from main hbase repo.'
+                sh '''#!/usr/bin/env bash
+                  set -eo pipefail
+                  declare script
+                  declare -a needed_files
+                  needed_files=( \
+                    hbase_nightly_pseudo-distributed-test.sh \
+                    jenkins-scripts/cache-apache-project-artifact.sh \
+                  )
+                  for script in "${needed_files[@]}"; do
+                    curl -L -O https://raw.githubusercontent.com/apache/hbase/HEAD/dev-support/"${script}"
+                    chmod +x "$(basename "${script}")"
+                  done
+                '''
+              }
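+              // stash the test script so each matrix cell below can unstash it on its own agent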
+              stash name: 'scripts', includes: "tools/hbase_nightly_pseudo-distributed-test.sh"
+              dir ("hbase") {
+                script {
+                  def hbase_versions = [ params.HBASE_VERSION ]
+                  if (params.HBASE_VERSION == 'all') {
+                    // this set needs to match the matrix axes below
+                    hbase_versions = [ '2.2.6', '2.3.3' ]
+                  }
+                  hbase_versions.each {
+                    def hbase_version = it
+                    sh """#!/usr/bin/env bash
+                      set -e
+                      set -x
+                      mkdir -p "downloads/hbase-${hbase_version}"
+                      mkdir -p "${CACHE_DIR}"
+                      echo 'downloading hbase version ${hbase_version}'
+                      '${WORKDIR}/tools/cache-apache-project-artifact.sh' \
+                        --working-dir '${WORKDIR}/hbase/downloads/hbase-${hbase_version}' \
+                        --keys 'https://downloads.apache.org/hbase/KEYS' \
+                        '${CACHE_DIR}/hbase-${hbase_version}-bin.tar.gz' \
+                        'hbase/${hbase_version}/hbase-${hbase_version}-bin.tar.gz'
+                      mkdir 'hbase-${hbase_version}'
+                      declare noop_htrace
+                      noop_htrace="\$(ls -1 '${WORKDIR}/component/hbase-noop-htrace/target/'hbase-noop-htrace-*.jar | head -n 1)"
+                      if [ -z "\${noop_htrace}" ]; then
+                        echo "failed to find htrace noop replacement. did the build step work?" >&2
+                        exit 1
+                      fi
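+                      # assumption: swap_htrace_jar.sh (this repo's dev-support) unpacks the
+                      # tarball into the target dir with the noop jar in place of bundled htrace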
+                      '${WORKDIR}/component/dev-support/jenkins/swap_htrace_jar.sh' '${WORKDIR}/hbase/hbase-${hbase_version}' '${CACHE_DIR}/hbase-${hbase_version}-bin.tar.gz' "\${noop_htrace}"
+                    """
+                    stash name: "hbase-${hbase_version}", includes: "hbase-${hbase_version}/**"
+                  }
+                }
+              }
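+              // hadoop gets the same treatment: fetch and verify the release tarball, then swap in the noop jar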
+              dir ("hadoop") {
+                script {
+                  def hadoop_versions = [ params.HADOOP_VERSION ]
+                  if (params.HADOOP_VERSION == 'all') {
+                    // this set needs to match the matrix axes below
+                    hadoop_versions = [ '3.2.1', '2.10.0' ]
+                  }
+                  hadoop_versions.each {
+                    def hadoop_version = it
+                    sh """#!/usr/bin/env bash
+                      set -e
+                      set -x
+                      mkdir -p "downloads/hadoop-${hadoop_version}"
+                      mkdir -p "${CACHE_DIR}"
+                      echo "downloading hadoop version ${hadoop_version}"
+                      "${WORKDIR}/tools/cache-apache-project-artifact.sh" \
+                        --working-dir "${WORKDIR}/hadoop/downloads/hadoop-${hadoop_version}" \
+                        --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
+                        "${CACHE_DIR}/hadoop-${hadoop_version}-bin.tar.gz" \
+                        "hadoop/common/hadoop-${hadoop_version}/hadoop-${hadoop_version}.tar.gz"
+                      mkdir "hadoop-${hadoop_version}"
+                      declare noop_htrace
+                      noop_htrace="\$(ls -1 "${WORKDIR}"/component/hbase-noop-htrace/target/hbase-noop-htrace-*.jar | head -n 1)"
+                      if [ -z "\${noop_htrace}" ]; then
+                        echo "failed to find htrace noop replacement. did the build step work?" >&2
+                        exit 1
+                      fi
+                      '${WORKDIR}/component/dev-support/jenkins/swap_htrace_jar.sh' "${WORKDIR}/hadoop/hadoop-${hadoop_version}" "${CACHE_DIR}/hadoop-${hadoop_version}-bin.tar.gz" "\${noop_htrace}"
+                    """
+                    stash name: "hadoop-${hadoop_version}", includes: "hadoop-${hadoop_version}/**"
+                  }
+                }
+              }
+            }
+          }
+          post {
+            cleanup {
+              // clean up the working area but don't delete the download cache
+              dir ("htrace-noop") {
+                deleteDir()
+              }
+            }
+          }
+        }
+        stage ("test htrace drop in replacement") {
+          matrix {
+            agent {
+              label 'hbase'
+            }
+            axes {
+              axis {
+                name 'HBASE'
+                values '2.2.6', '2.3.3'
+              }
+              axis {
+                name 'HADOOP'
+                values '3.2.1', '2.10.0'
+              }
+            }
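+            // when inside a matrix is evaluated per cell, so choosing a specific
+            // HBASE_VERSION/HADOOP_VERSION prunes the combinations that don't match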
+            when {
+              allOf {
+                anyOf {
+                  expression { params.HBASE_VERSION == 'all' }
+                  expression { params.HBASE_VERSION == env.HBASE }
+                }
+                anyOf {
+                  expression { params.HADOOP_VERSION == 'all' }
+                  expression { params.HADOOP_VERSION == env.HADOOP }
+                }
+              }
+            }
+            stages {
+              stage ("test a specific hbase on a specific hadoop") {
+                steps {
+                  unstash 'scripts'
+                  unstash "hbase-${env.HBASE}"
+                  unstash "hadoop-${env.HADOOP}"
+                  sh '''#!/usr/bin/env bash
+                    set -eo pipefail
+                    set -x
+                    mkdir -p "hbase-${HBASE}.hadoop-${HADOOP}"
+                    ./tools/hbase_nightly_pseudo-distributed-test.sh \
+                      --single-process \
+                      --working-dir "hbase-${HBASE}.hadoop-${HADOOP}" \
+                      "hbase-${HBASE}" \
+                      "hadoop-${HADOOP}/bin/hadoop" \
+                      "hadoop-${HADOOP}"/share/hadoop/yarn/timelineservice \
+                      "hadoop-${HADOOP}"/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                      "hadoop-${HADOOP}"/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                      "hadoop-${HADOOP}"/bin/mapred
+                  '''
+                }
+                post {
+                  failure {
+                    sh '''#!/usr/bin/env bash
+                      set -eo pipefail
+                      set -x
+                      find "hbase-${HBASE}.hadoop-${HADOOP}" \
+                        "hbase-${HBASE}" "hadoop-${HADOOP}" \
+                        -type d -name logs | \
+                        xargs zip -r "hbase-${HBASE}.hadoop-${HADOOP}.logs.zip"
+                    '''
+                    archiveArtifacts artifacts: "hbase-${env.HBASE}.hadoop-${env.HADOOP}.logs.zip"
+                  }
+                  cleanup {
+                    deleteDir()
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
   }
 
   post {
     // Jenkins pipeline jobs fill slaves on PRs without this :(
-    cleanup() {
+    cleanup {
       script {
         sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
           # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
-          '''
-        deleteDir()
+          '''
       }
+      // we purposefully don't do a top level workspace cleanup so that we can reuse downloads
     }
   }
 }