-
Notifications
You must be signed in to change notification settings - Fork 4
/
Jenkinsfile
335 lines (297 loc) · 10.2 KB
/
Jenkinsfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
@Library('ecdc-pipeline')
import ecdcpipeline.ContainerBuildNode
import ecdcpipeline.PipelineBuilder
// Number of old builds to keep. These numbers are somewhat arbitrary, but
// based on the fact that for the main branch we want a certain number of old
// builds available, while for the other branches we want to be able to deploy
// easily without using too much disk space.
def num_artifacts_to_keep = (env.BRANCH_NAME == 'main') ? '5' : '2'

// Configure the build discarder so old builds and artefacts are rotated out
properties([[
  $class: 'BuildDiscarderProperty',
  strategy: [
    $class: 'LogRotator',
    artifactDaysToKeepStr: '',
    artifactNumToKeepStr: num_artifacts_to_keep,
    daysToKeepStr: '',
    numToKeepStr: ''
  ]
]]);
// Node labels for the additional pipeline steps
def release_node = 'centos7-release'  // Build for archiving artefact
def coverage_node = 'ubuntu2204'      // Calculate test coverage

// Map of build-node label to container definition. Deliberately bound
// without 'def' so it is visible throughout the script.
build_nodes = [:]
build_nodes['almalinux8'] = ContainerBuildNode.getDefaultContainerBuildNode('almalinux8-gcc12')
build_nodes['centos7'] = ContainerBuildNode.getDefaultContainerBuildNode('centos7-gcc11')
build_nodes[release_node] = ContainerBuildNode.getDefaultContainerBuildNode('centos7-gcc11')
build_nodes[coverage_node] = ContainerBuildNode.getDefaultContainerBuildNode('ubuntu2204')

pipeline_builder = new PipelineBuilder(this, build_nodes)
pipeline_builder.activateEmailFailureNotifications()

// Name of the archive file used in the main pipeline and integration test
def archive_output = "${pipeline_builder.project}-${release_node}.tar.gz"
// Main build and test pipeline: one parallel branch per entry in build_nodes
builders = pipeline_builder.createBuilders { container ->
pipeline_builder.stage("${container.key}: Checkout") {
dir(pipeline_builder.project) {
// Bound without 'def' so the commit information from checkout is also
// available in the archive stage further down
scm_vars = checkout scm
}
// Copy source code to container
container.copyTo(pipeline_builder.project, pipeline_builder.project)
} // stage: checkout
// Fetch build dependencies with conan; CONAN_INFO records the resolved
// dependency versions and is shipped with the release archive below
pipeline_builder.stage("${container.key}: Dependencies") {
container.sh """
mkdir build
cd build
conan install --build=outdated ../${pipeline_builder.project}/conanfile.txt
conan info ../${pipeline_builder.project}/conanfile.txt > CONAN_INFO
"""
} // stage: dependencies
// Configure with cmake; the release node uses a dedicated configure script
// and the coverage node additionally enables the COV option
pipeline_builder.stage("${container.key}: Configuration") {
if (container.key == release_node) {
container.sh """
${pipeline_builder.project}/ci/configure-release.sh \
${pipeline_builder.project} \
build
"""
} else if (container.key == coverage_node) {
container.sh """
cd build
cmake -DCOV=ON -DRUN_DOXYGEN=ON -GNinja ../${pipeline_builder.project}
"""
} else {
container.sh """
cd build
cmake -DRUN_DOXYGEN=ON -GNinja ../${pipeline_builder.project}
"""
}
} // stage: configuration
pipeline_builder.stage("${container.key}: Build") {
container.sh """
cd build
ninja all
"""
} // stage: build
pipeline_builder.stage("${container.key}: Test") {
if (container.key == coverage_node) {
// Run unit tests with XML output so the results can be published,
// then generate the coverage report
container.sh """
cd build
./bin/UnitTests -- --gtest_output=xml:test_results.xml
ninja coverage
"""
// Copy test and coverage results (lands at build/ in the workspace)
container.copyFrom('build', '.')
// Publish test results
junit "build/test_results.xml"
// Process and publish test coverage
sh """
${pipeline_builder.project}/ci/redirect-coverage.sh \
build/coverage.xml \
${pipeline_builder.project}
"""
step([
$class: 'CoberturaPublisher',
autoUpdateHealth: true,
autoUpdateStability: true,
coberturaReportFile: 'build/coverage.xml',
failUnhealthy: false,
failUnstable: false,
maxNumberOfBuilds: 0,
onlyStable: false,
sourceEncoding: 'ASCII',
zoomCoverageChart: true
])
} else {
// Not a coverage node: run the unit tests without publishing results
container.sh """
cd build
./bin/UnitTests
"""
}
} // stage: test
pipeline_builder.stage("${container.key}: Documentation") {
container.sh """
cd build
ninja docs
"""
} // stage: documentation
// Only the release node produces and archives the deployable artefact
if (container.key == release_node) {
pipeline_builder.stage("${container.key}: Archive") {
// Create archive file containing the binaries, libraries, licenses,
// conan dependency info and build metadata
container.sh """
cd build
rm -rf ${pipeline_builder.project}; mkdir ${pipeline_builder.project}
mkdir ${pipeline_builder.project}/bin
cp ./bin/kafka-to-nexus ${pipeline_builder.project}/bin/
cp ./bin/template-maker ${pipeline_builder.project}/bin/
cp -r ./lib ${pipeline_builder.project}/
cp -r ./licenses ${pipeline_builder.project}/
cp ./CONAN_INFO ${pipeline_builder.project}/
# Create file with build information
touch ${pipeline_builder.project}/BUILD_INFO
echo 'Repository: ${pipeline_builder.project}/${env.BRANCH_NAME}' \
>> ${pipeline_builder.project}/BUILD_INFO
echo 'Commit: ${scm_vars.GIT_COMMIT}' >> ${pipeline_builder.project}/BUILD_INFO
echo 'Jenkins build: ${env.BUILD_NUMBER}' >> ${pipeline_builder.project}/BUILD_INFO
tar czf ${archive_output} ${pipeline_builder.project}
"""
// Copy files from container and archive
container.copyFrom("build/${archive_output}", '.')
container.copyFrom("build/${pipeline_builder.project}/BUILD_INFO", '.')
archiveArtifacts "${archive_output},BUILD_INFO"
// Stash archive file for integration test in pull request builds
// NOTE(review): stash is called with only a name, so the default
// includes pattern applies — verify it stashes what the integration
// test expects and not more of the workspace than necessary
if (env.CHANGE_ID) {
stash "${archive_output}"
}
} // stage: archive
} // if
} // createBuilders
// Only run static checks in pull requests
if (env.CHANGE_ID) {
// Separate single-node builder used exclusively for the static checks
pr_checks_nodes = [
'pr-checks': ContainerBuildNode.getDefaultContainerBuildNode('ubuntu2204')
]
pr_pipeline_builder = new PipelineBuilder(this, pr_checks_nodes)
pr_pipeline_builder.activateEmailFailureNotifications()
pr_checks_builders = pr_pipeline_builder.createBuilders { container ->
pr_pipeline_builder.stage("${container.key}: Checkout") {
dir(pr_pipeline_builder.project) {
scm_vars = checkout scm
}
// Copy source code to container
container.copyTo(pr_pipeline_builder.project, pr_pipeline_builder.project)
} // stage: checkout
// Check C++ formatting via the repository's ci script
pr_pipeline_builder.stage("${container.key}: Clang-format") {
// Postpone failure to end of pipeline
catchError(stageResult: 'FAILURE') {
container.sh """
cd ${pr_pipeline_builder.project}
ci/check-formatting.sh
"""
} // catchError
} // stage: clang-format
// Check Python formatting of the integration tests with black
pr_pipeline_builder.stage("${container.key}: Black") {
// Postpone failure to end of pipeline
catchError(stageResult: 'FAILURE') {
container.sh """
cd ${pr_pipeline_builder.project}
python3 -m black --version
python3 -m black --check integration-tests
"""
} // catchError
} // stage: black
// Static analysis of the C++ sources; findings are written as XML so
// they can be published with recordIssues below
pr_pipeline_builder.stage("${container.key}: Cppcheck") {
container.sh """
cd ${pr_pipeline_builder.project}
cppcheck --version
cppcheck \
--xml \
--inline-suppr \
--suppress=unusedFunction \
--suppress=missingInclude \
--enable=all \
--inconclusive \
src/ 2> cppcheck.xml
"""
// Copy files from container and publish report
container.copyFrom(
"${pr_pipeline_builder.project}/cppcheck.xml",
pr_pipeline_builder.project
)
dir("${pr_pipeline_builder.project}") {
// Quality gate triggers at the first finding; NOTE(review): with
// unstable: false the gate escalates the build result beyond
// unstable — confirm this severity is intended
recordIssues \
enabledForFailure: true,
quiet: true,
sourceCodeEncoding: 'UTF-8',
qualityGates: [[
threshold: 1,
type: 'TOTAL',
unstable: false
]],
tools: [cppCheck(pattern: 'cppcheck.xml', reportEncoding: 'UTF-8')]
} // dir
} // stage: cppcheck
} // PR checks createBuilders
// Run the PR checks in parallel with the main builders
builders = builders + pr_checks_builders
} // if
// Check out on the master node and run all builders in parallel
node('master') {
  dir("${pipeline_builder.project}") {
    scm_vars = checkout scm
  }
  try {
    // Start pipelines
    parallel builders
  } catch (e) {
    // Send failure notifications before propagating the error
    pipeline_builder.handleFailureMessages()
    throw e
  } finally {
    // Delete workspace when the build is done. Runs in 'finally' so the
    // workspace is also cleaned when a builder fails — previously cleanWs
    // was placed after the try/catch and skipped on failure, leaking disk
    // space on the master node.
    cleanWs()
  }
}
// Only run integration test on pull requests
if (env.CHANGE_ID) {
node('docker') {
stage('checkout') {
checkout scm
// Fetch the archive produced by the release-node build and unpack it
unstash "${archive_output}"
sh "tar xvf ${archive_output}"
} // stage: checkout
// Install the Python requirements into a virtualenv
// ('scl enable rh-python38' selects the software-collection Python 3.8)
stage("requirements") {
dir("integration-tests") {
sh """
scl enable rh-python38 -- python -m venv venv
venv/bin/pip install --upgrade pip
venv/bin/pip install -r requirements.txt
"""
} // dir
} // stage: requirements
try {
dir("integration-tests") {
stage("integration-test") {
// Stop and remove any containers that may have been from the job before,
// i.e. if a Jenkins job has been aborted.
sh """
docker stop \$(docker-compose ps -a -q) \
&& docker rm \$(docker-compose ps -a -q) \
|| true
"""
// Limit run to 30 minutes
// NOTE(review): with activity: true the timeout fires after 30
// minutes of log inactivity, not 30 minutes total — confirm intent
timeout(time: 30, activity: true) {
sh """
chmod go+w logs output-files
LD_LIBRARY_PATH=../lib venv/bin/python -m pytest \
-s \
--writer-binary="../kafka-to-nexus" \
--junitxml=./IntegrationTestsOutput.xml \
.
"""
} // timeout
} // stage: integration-test
} // dir
} finally {
dir("integration-tests") {
stage ("clean-up") {
// The statements below return true because the build should pass
// even if there are no docker containers or output files to be
// removed.
sh """
rm -rf output-files/* || true
docker stop \$(docker-compose ps -a -q) \
&& docker rm \$(docker-compose ps -a -q) \
|| true
chmod go-w logs output-files
"""
} // stage: clean-up
// Publish the integration test results and the writer logs
// NOTE(review): if pytest never produced the XML (early failure),
// junit throws here and masks the original error — consider
// allowEmptyResults
stage("results") {
junit "IntegrationTestsOutput.xml"
archiveArtifacts "logs/*.txt"
} // stage: results
} // dir
} // try/finally
} // node
} // if