blob: b4065c2f34963fcf5f993beee851a17b3ef4c6cc [file] [log] [blame]
Xinyu Zhangf27f6032020-11-03 15:58:24 +08001#!/usr/bin/env groovy
2//-------------------------------------------------------------------------------
3// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
4//
5// SPDX-License-Identifier: BSD-3-Clause
6//
7//-------------------------------------------------------------------------------
8
9@Library('trustedfirmware') _
10import org.trustedfirmware.Gerrit
11import org.trustedfirmware.Summary
12
// TF-M platform identifier (as emitted by configs.py) -> LAVA TARGET_PLATFORM value.
mapPlatform = ["cypress/psoc64": "psoc64",
               "mps2/an519": "AN519",
               "mps2/an521": "AN521",
               "mps2/an539": "AN539",
               "mps2/sse-200_aws": "SSE-200_AWS",
               "mps3/an524": "AN524",
               "musca_a": "MUSCA_A",
               "musca_b1": "MUSCA_B1",
               "musca_s1": "MUSCA_S1"]

// CMake toolchain file name -> LAVA COMPILER value.
mapCompiler = ["toolchain_GNUARM.cmake": "GNUARM",
               "toolchain_ARMCLANG.cmake": "ARMCLANG"]

// BL2 build-parameter string -> build.py command-line flag ("" when disabled).
mapBL2 = ["True": "--bl2",
          "False": ""]

// TEST_PSA_API build parameter -> LAVA PSA_API_SUITE value ("" when off).
mapTestPsaApi = ["OFF": "",
                 "INTERNAL_TRUSTED_STORAGE": "ITS",
                 "PROTECTED_STORAGE": "PS",
                 "CRYPTO": "Crypto",
                 "INITIAL_ATTESTATION": "Attest",
                 "IPC": "FF"]
35
/**
 * Translate a build's parameter collection into the parameter list expected
 * by the LAVA submit job.
 *
 * @param build_params map of build parameters (TFM_PLATFORM, TOOLCHAIN_FILE,
 *                     TEST_PSA_API, BL2, NS, PSA_API, ISOLATION_LEVEL,
 *                     TEST_REGRESSION, PROFILE) as produced by configs.py.
 * @return list of Jenkins `string` parameters: TARGET_PLATFORM, COMPILER,
 *         PSA_API_SUITE and the legacy PROJ_CONFIG name.
 */
def generateLavaParam(build_params) {
  def params = []
  params += string(name: "TARGET_PLATFORM", \
                   value: mapPlatform[build_params["TFM_PLATFORM"]])
  params += string(name: "COMPILER", \
                   value: mapCompiler[build_params["TOOLCHAIN_FILE"]])
  params += string(name: "PSA_API_SUITE", \
                   value: mapTestPsaApi[build_params["TEST_PSA_API"]])

  // Lookup table replacing the former 13-branch if/else chain.  Every branch
  // required BL2 == "True" and NS == "True", so those are hoisted into a
  // single guard.  Key fields, joined with '|':
  //   PSA_API | ISOLATION_LEVEL | TEST_REGRESSION | TEST_PSA_API collapsed
  //   to "OFF"/"ON" | PROFILE
  def projConfigTable = [
    "False|1|False|OFF|N.A":           "ConfigDefault",
    "True|1|False|OFF|N.A":            "ConfigCoreIPC",
    "True|2|False|OFF|N.A":            "ConfigCoreIPCTfmLevel2",
    "False|1|False|OFF|profile_small": "ConfigDefaultProfileS",
    "True|2|False|OFF|profile_medium": "ConfigDefaultProfileM",
    "False|1|True|OFF|N.A":            "ConfigRegression",
    "True|1|True|OFF|N.A":             "ConfigRegressionIPC",
    "True|2|True|OFF|N.A":             "ConfigRegressionIPCTfmLevel2",
    "False|1|True|OFF|profile_small":  "ConfigRegressionProfileS",
    "True|2|True|OFF|profile_medium":  "ConfigRegressionProfileM",
    "False|1|False|ON|N.A":            "ConfigPsaApiTest",
    "True|1|False|ON|N.A":             "ConfigPsaApiTestIPC",
    "True|2|False|ON|N.A":             "ConfigPsaApiTestIPCTfmLevel2",
  ]

  // Any combination not in the table (including BL2/NS disabled) falls back
  // to ConfigDefault, exactly as the original else-branch did.
  def proj_config = "ConfigDefault"
  if (build_params["BL2"] == "True" && build_params["NS"] == "True") {
    def psa_api_suite = build_params["TEST_PSA_API"] == "OFF" ? "OFF" : "ON"
    def key = [build_params["PSA_API"],
               build_params["ISOLATION_LEVEL"],
               build_params["TEST_REGRESSION"],
               psa_api_suite,
               build_params["PROFILE"]].join("|")
    proj_config = projConfigTable[key] ?: "ConfigDefault"
  }
  params += string(name: "PROJ_CONFIG", value: proj_config)
  return params
}
167
/**
 * Populate `config_list` with the build configuration names belonging to
 * `filter_group`, by running configs.py inside `ci_scripts_dir`.
 *
 * @param ci_scripts_dir directory containing the CI scripts checkout
 * @param config_list    list to append the discovered config names to
 * @param filter_group   space-separated group names; each becomes a -g flag
 */
def listConfigs(ci_scripts_dir, config_list, filter_group) {
  dir(ci_scripts_dir) {
    // "a b" -> "a -g b", so the single -g prefix covers every group.
    def group_args = filter_group.replace(" ", " -g ")
    echo "Obtaining list of configs."
    echo "Running: python3 ./configs.py -g ${group_args}"
    def raw_config_names = sh(script: """\
python3 ./configs.py -g ${group_args}
""", returnStdout: true).trim()
    // One config name per line of script output.
    config_list.addAll(raw_config_names.tokenize('\n'))
  }
}
179
/**
 * Prepare one build configuration and return a closure (for `parallel`) that
 * triggers the tf-m-build-config-infra-health job and, when applicable,
 * submits the resulting build to LAVA.
 *
 * @param ci_scripts_dir directory containing the CI scripts checkout
 * @param config         name of the build configuration to run
 * @param filter_group   space-separated group names; each becomes a -g flag
 * @param results        shared map; this closure writes into
 *                       results['builds'] and results['lava_jobs']
 * @return closure suitable as a `parallel` branch body
 */
def buildConfig(ci_scripts_dir, config, filter_group, results) {
  def params = []
  def params_collection = [:]
  def build_config_params
  dir(ci_scripts_dir) {
    echo "Obtaining build configuration for config ${config}"
    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
    // configs.py prints one KEY=VALUE pair per line for the named config.
    build_config_params = sh(script: """\
python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
""", returnStdout: true).trim()
  }
  def lines = build_config_params.tokenize('\n')
  for (String line : lines) {
    def key, value
    (key, value) = line.tokenize('=')
    params += string(name: key, value: value)
    params_collection[key] = value
  }
  // Forward Gerrit/CI context from this job's environment to the child job.
  params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
  params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
  params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
  params += string(name: 'CODE_REPO', value: env.CODE_REPO)
  params += string(name: 'CODE_COVERAGE_EN', value: env.CODE_COVERAGE_EN)
  params += string(name: 'TFM_TESTS_REFSPEC', value: env.TFM_TESTS_REFSPEC)
  params += string(name: 'CI_SCRIPTS_REFSPEC', value: env.CI_SCRIPTS_REFSPEC)
  params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
  // NOTE(review): the bare `results` expression on the closure's first line
  // evaluates to the captured map and is discarded; it has no effect.
  return { -> results
    // propagate:false so a failed build does not abort the whole parallel
    // stage; failure states are translated into error() below instead.
    def build_res = build(job: 'tf-m-build-config-infra-health', parameters: params, propagate: false)
    def build_info = [build_res, config, params_collection]
    results['builds'][build_res.number] = build_info
    def build_url = build_res.getAbsoluteUrl()
    print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
    // NOTE(review): `failure_states` is undeclared, so it lands in the
    // script binding and is shared across parallel branches (the value is
    // identical everywhere, so this is benign but worth confirming).
    failure_states = ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]
    if (build_res.result in failure_states) {
      error("Build failed at ${build_url}")
    }
    else if (params_collection["NS"] == "False" ||
             params_collection["PARTITION_PS"] == "OFF") {
      // Secure-only or PS-disabled builds are not run on hardware.
      print("LAVA is not needed for ${build_url}")
    }
    else {
      print("Doing LAVA stuff for ${build_url}")
      // Reuse the build's params and append the LAVA-specific ones.
      params += generateLavaParam(params_collection)
      params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
      params += string(name: 'BUILD_URL', value: build_url)
      params += string(name: 'LAVA_URL', value: env.LAVA_URL)
      params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
      params += string(name: 'LAVA_CREDENTIALS', value: env.LAVA_CREDENTIALS)
      def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
      if (lava_res.result in failure_states) {
        error("LAVA Create and Submit failed at ${lava_res.getAbsoluteUrl()}")
      }
      else {
        // The submit job publishes the LAVA job ids in its description
        // ("JOBS: ..."); collected later by submitJobsToList().
        results['lava_jobs'] += lava_res.getDescription()
      }
    }
  }
}
241
/**
 * Return a closure (for `parallel`) that triggers the docs build job,
 * records its outcome in results['docs'], and raises an error on any
 * non-success build state.
 *
 * @param results shared map; results['docs'] receives [number, result, params]
 */
def buildDocs(results) {
  def params = []
  // Forward the Gerrit/CI context variables to the child job.
  def forwarded = ['GERRIT_BRANCH', 'GERRIT_HOST', 'GERRIT_CHANGE_NUMBER',
                   'GERRIT_PATCHSET_REVISION', 'GERRIT_REFSPEC',
                   'MBEDTLS_VERSION', 'CODE_REPO', 'CI_SCRIPTS_BRANCH']
  forwarded.each { name ->
    params += string(name: name, value: env."${name}")
  }
  return { -> results
    def res = build(job: 'tf-m-build-docs-infra-health', parameters: params, propagate: false)
    print("${res.number}: Docs ${res.result} ${res.getAbsoluteUrl()}")
    results['docs'] = [res.number, res.result, params]
    if (res.result in ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]) {
      error("Build failed at ${res.getAbsoluteUrl()}")
    }
  }
}
261
/**
 * Send a failure e-mail for the nightly job.
 *
 * @param results true when the stage succeeded (no mail is sent), anything
 *                else counts as failure
 * @param stage   stage name used in the mail subject / log line
 */
def emailNotification(results, stage) {
  script {
    // Only the nightly job notifies, and only when recipients are configured.
    if (!env.JOB_NAME.equals("tf-m-nightly") || env.EMAIL_NOTIFICATION.equals('')) {
      return
    }
    if (results == true) {
      print("Skip sending as Success. for ${stage}")
      return
    }
    emailext (
      subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} Fail."),
      body: "Check console output at ${env.BUILD_URL}",
      to: "${EMAIL_NOTIFICATION}"
    )
  } /* script */
}
280
@NonCPS
/**
 * Build a CSV table (list of rows) summarising build results per
 * platform/compiler/build-type/BL2 combination, one column per config.
 *
 * @NonCPS: uses non-serializable iteration (each/closures), so it must run
 * as plain Groovy outside the CPS transform.
 *
 * @param results map of build number -> [build_res, config, params_collection]
 *                (the results['builds'] map filled in by buildConfig)
 * @return list of rows; row 0 is the header, later rows blank out repeated
 *         platform/compiler/build-type cells for readability
 */
def generateCsvContent(results) {
  // Flatten each result into a positional list:
  // [0]=config name, [1]=build result, [2]=platform, [3]=compiler,
  // [4]=PROJ_CONFIG, [5]=cmake build type, [6]=BL2, [7]=PSA suite.
  def resultsParam = []
  results.each { result ->
    resultsParam.add([result.value[1], \
                      result.value[0].getResult(), \
                      result.value[2]['TARGET_PLATFORM'], \
                      result.value[2]['COMPILER'], \
                      result.value[2]['PROJ_CONFIG'], \
                      result.value[2]['CMAKE_BUILD_TYPE'], \
                      result.value[2]['BL2'], \
                      result.value[2]['PSA_API_SUITE']])
  }
  // Derive the set of column labels and append each row's label as [8].
  def configs = [] as Set
  resultsParam.each { result ->
    // MUSCA_B1 OTP variants are distinguished by the config name.
    if (result[2] == 'MUSCA_B1') {
      if (result[0].contains('_OTP_')) {
        result[2] += '_OTP'
      }
    }
    // Convert the BL2 flag into a display label.
    if (result[6] == 'True') {
      result[6] = 'BL2'
    }
    else {
      result[6] = 'NOBL2'
    }
    // NOTE(review): `config` is undeclared and therefore lives in the
    // script binding rather than as a local variable.
    config = result[4]
    // A PSA suite of "''" means "none" here — presumably how an empty
    // value round-trips through the params; verify against the producer.
    if (result[7] != "''") {
      config += ' (' + result[7] + ') '
    }
    configs.add(config)
    result.add(config)
  }
  configs.sort()
  // Group rows by (platform, compiler, build type, BL2); element [4] maps
  // column label -> build result for that group.
  def csvContent = []
  resultsParam.each { result ->
    def configExists = false
    for (csvLine in csvContent) {
      if (csvLine[0] == result[2] && \
          csvLine[1] == result[3] && \
          csvLine[2] == result[5] && \
          csvLine[3] == result[6]) {
        csvLine[4][result[8]] = result[1]
        configExists = true
        break
      }
    }
    if (!configExists) {
      csvContent.add([result[2], result[3], result[5], result[6], [:]])
      csvContent.last()[4][result[8]] = result[1]
    }
  }
  // Order rows by platform, then compiler, then build type, then BL2.
  csvContent.sort{a,b -> a[0] <=> b[0] ?: a[1] <=> b[1] ?: a[2] <=> b[2] ?: a[3] <=> b[3]}
  def csvTable = [['Platform', 'Compiler', 'Cmake Build Type', 'BL2']]
  csvTable[0] += configs
  def currentPlatform = ''
  def currentCompiler = ''
  def currentBuild = ''
  csvContent.each { csvLine ->
    // Modify CSV output format for a better layout: repeat cells within a
    // group are left blank so grouping is visible in the spreadsheet.
    if (currentPlatform == csvLine[0]) {
      csvTable.add([''])
    }
    else {
      csvTable.add([csvLine[0]])
      currentPlatform = csvLine[0]
      currentCompiler = ''
      currentBuild = ''
    }
    if (currentCompiler == csvLine[1]) {
      csvTable.last().add('')
    }
    else {
      csvTable.last().add(csvLine[1])
      currentCompiler = csvLine[1]
      currentBuild = ''
    }
    if (currentBuild == csvLine[2]) {
      csvTable.last().add('')
    }
    else {
      csvTable.last().add(csvLine[2])
      currentBuild = csvLine[2]
    }
    csvTable.last().add(csvLine[3])
    // One cell per known config column; 'N/A' when this group has no result.
    configs.each { config ->
      if (csvLine[4].containsKey(config)) {
        csvTable.last().add(csvLine[4][config])
      }
      else {
        csvTable.last().add('N/A')
      }
    }
  }
  return csvTable
}
377
// Render the per-build results map into build_results.csv on the master
// node and archive it as a job artifact.
def generateBuildCsv(results) {
  def records = generateCsvContent(results)
  node("master") {
    writeCSV file: 'build_results.csv', records: records, format: CSVFormat.EXCEL
    archiveArtifacts 'build_results.csv'
  }
}
385
// Alternative CSV generation path: delegate table construction to the
// shared-library Summary class, then write and archive the file on master.
def buildCsv(results) {
  def records = new Summary().getBuildCsv(results)
  node("master") {
    writeCSV file: 'build_results.csv', records: records, format: CSVFormat.EXCEL
    archiveArtifacts 'build_results.csv'
  }
}
394
// Produce an HTML page of links to the child builds (via the shared-library
// Summary class) and archive it as build_links.html on the master node.
def writeSummary(results) {
  def links_html = new Summary().getLinks(results)
  node("master") {
    writeFile file: "build_links.html", text: links_html
    archiveArtifacts 'build_links.html'
  }
}
403
// Return the first line of `string` that contains `match`, or null when no
// line does (same as the former findAll{...}[0] on an empty list).
def lineInString(string, match) {
  return string.split("\n").find { it.contains(match) }
}
409
/**
 * Extract a "<match><score> [explanation...]" value from job output.
 *
 * Finds the first line containing `match` (via lineInString), splits off the
 * text after the marker, and returns [score, explanation] where explanation
 * is "" when only the score is present.
 *
 * Fix: `line`, `a`, `score` and `fail_text` were undeclared and therefore
 * leaked into the script binding (shared, non-local state); they are now
 * proper locals.
 *
 * NOTE(review): if no line matches, lineInString returns null and this
 * throws on the split — same as the original behavior; callers are expected
 * to pass output that contains the marker.
 */
def getResult(string, match) {
  def line = lineInString(string, match)
  def fields = line.split(match)[1].split(' ')
  def score = fields[0]
  if (fields.size() > 1)
  {
    def fail_text = fields[1..-1].join(" ")
    return [score, fail_text]
  }
  return [score, ""]
}
421
/**
 * Collect LAVA job-id strings from submit-step descriptions.
 *
 * Each entry containing the "JOBS: " marker contributes the text following
 * the marker; entries without it are ignored.
 *
 * Fix: `jobs_s` was undeclared and leaked into the script binding; it is now
 * a local variable.
 *
 * @param results iterable of description strings (results['lava_jobs'])
 * @return list of the extracted job-id strings
 */
def submitJobsToList(results) {
  def all_jobs = []
  for (String result : results){
    def jobs_s = result.split('JOBS: ')
    if (jobs_s.size() > 1) {
      all_jobs += jobs_s[1]
    }
  }
  return(all_jobs)
}
432
// Config names discovered by listConfigs() for FILTER_GROUP.
def configs = []
// Map of config name -> closure, executed via `parallel` in the Builds stage.
def builds = [:]
// Shared state across stages: 'builds' (build number -> info) and 'lava_jobs'.
def results = [:]
436
// Stage 1: check out the CI scripts and enumerate the build configurations.
node("docker-amd64-tf-m-bionic") {
  stage("Init") {
    cleanWs()
    dir("tf-m-ci-scripts") {
      // Single-quoted on purpose: GitSCM expands $CI_SCRIPTS_BRANCH and
      // $CI_SCRIPTS_REPO itself at checkout time.
      checkout([$class: 'GitSCM', branches: [[name: '$CI_SCRIPTS_BRANCH']], userRemoteConfigs: [[credentialsId: 'GIT_SSH_KEY', url: '$CI_SCRIPTS_REPO']]])
    }
  }
  stage("Configs") {
    // Populate configs
    listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
    results['builds'] = [:]
    results['lava_jobs'] = []
    // One parallel branch per configuration.
    for (config in configs) {
      builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
    }
    // builds["docs"] = buildDocs(results) # Build Docs is not necessary in Infra-Health
  }
}
455
// Stage 2: run all per-config builds in parallel; always report status to
// Gerrit, e-mail on nightly failures, and publish the CSV/links summaries.
stage("Builds") {
  def verify = 1
  def success = true
  try {
    parallel(builds)
  } catch (Exception e) {
    print(e)
    manager.buildFailure()
    verify = -1
    success = false
  } finally {
    print("Verifying status")
    emailNotification(success, 'Build')
    // NOTE(review): `g` is undeclared, so it lives in the script binding.
    g = new Gerrit()
    g.verifyStatus(verify, 'tf-m-build', 'build')
    print("Building CSV")
    generateBuildCsv(results['builds'])
    writeSummary(results['builds'])
  }
}
476
// Stage 3: optionally copy docs artifacts, then wait for the submitted LAVA
// jobs, archive their artifacts, and report boot/test verdicts to Gerrit.
node("docker-amd64-tf-m-bionic") {
  stage("Copy Docs") {
    if (env.JOB_NAME.equals("tf-m-build-and-test")) {
      // NOTE(review): results['docs'] is only set by buildDocs(), which is
      // commented out above — this branch would fail for that job name;
      // confirm whether it is reachable in the infra-health pipeline.
      step([$class: 'CopyArtifact', projectName: 'tf-m-build-docs',
        selector: specific("${results['docs'][0]}"), target: './docs/',
        optional: true])
      archiveArtifacts artifacts: 'docs/**', allowEmptyArchive: true
    }
    else {
      print("No doc copy for job: ${env.JOB_NAME}")
    }
  }
  stage("Tests") {
    dir("tf-m-ci-scripts") {
      // Single-quoted on purpose: GitSCM expands the $-variables itself.
      checkout([$class: 'GitSCM', branches: [[name: '$CI_SCRIPTS_BRANCH']], userRemoteConfigs: [[credentialsId: 'GIT_SSH_KEY', url: '$CI_SCRIPTS_REPO']]])
    }
    def all_jobs = []
    def success = true
    print("Wait for LAVA results here...")
    try {
      all_jobs = submitJobsToList(results['lava_jobs'])
      if (all_jobs.size() > 0) {
        dir("tf-m-ci-scripts") {
          withCredentials([usernamePassword(credentialsId: env.LAVA_CREDENTIALS, passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
            // Block until every LAVA job finishes (up to 2 hours) and pull
            // the artifacts locally; the script prints BOOT_RESULT/TEST_RESULT.
            output = sh(script: """./lava_helper/lava_wait_jobs.py --job-ids ${all_jobs.join(",")} \
            --lava-url ${env.LAVA_URL} --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} \
            --artifacts-path lava_artifacts --lava-timeout 7200 \
            """, returnStdout: true).trim()
            archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
            print(output)
            g = new Gerrit()
            // Report boot and test verdicts to Gerrit separately.
            def (boot_result, boot_output) = getResult(output, 'BOOT_RESULT: ')
            if (boot_result) {
              g.verifyStatus(boot_result, "lava_boot", "test")
            }
            def (test_result, test_output) = getResult(output, 'TEST_RESULT: ')
            if (test_result) {
              g.verifyStatus(test_result, "lava_test", "test")
            }
            // A score below 1 on either axis fails this Jenkins build.
            if (boot_result.toInteger() < 1 || test_result.toInteger() < 1) {
              error("Marking job as failed due to failed boots: ${boot_output} or tests: ${test_output}")
            }
          }
        }
      }
      else {
        print("There were no LAVA jobs to test.")
      }
    }
    catch (Exception e) {
      print("ERROR: ${e}")
      success = false
    } finally {
      // Archive whatever LAVA produced even on failure, then notify.
      archiveArtifacts artifacts: 'tf-m-ci-scripts/lava_artifacts/**', allowEmptyArchive: true
      emailNotification(success, 'Test')
      cleanWs()
      if (!success) {
        error("There was an Error waiting for LAVA jobs")
      }
    }
  }
}