#!/usr/bin/env groovy
//-------------------------------------------------------------------------------
// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
//
// SPDX-License-Identifier: BSD-3-Clause
//
//-------------------------------------------------------------------------------

@Library('trustedfirmware') _
import org.trustedfirmware.Gerrit
import org.trustedfirmware.Summary
// Assumed import: writeCSV's "format:" argument below references CSVFormat.EXCEL.
import org.apache.commons.csv.CSVFormat

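// Translation tables between tf-m-build-config parameter values and the names
// used by the LAVA test infrastructure.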
mapPlatform = ["cypress/psoc64": "psoc64",
               "mps2/an519": "AN519",
               "mps2/an521": "AN521",
               "mps2/an539": "AN539",
               "mps2/sse-200_aws": "SSE-200_AWS",
               "mps3/an524": "AN524",
               "musca_a": "MUSCA_A",
               "musca_b1": "MUSCA_B1",
               "musca_s1": "MUSCA_S1"]

mapCompiler = ["toolchain_GNUARM.cmake": "GNUARM",
               "toolchain_ARMCLANG.cmake": "ARMCLANG"]

mapBL2 = ["True": "--bl2",
          "False": ""]

mapTestPsaApi = ["OFF": "",
                 "INTERNAL_TRUSTED_STORAGE": "ITS",
                 "PROTECTED_STORAGE": "PS",
                 "CRYPTO": "Crypto",
                 "INITIAL_ATTESTATION": "Attest",
                 "IPC": "FF"]

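// Translate one build configuration's parameters into the parameter list for
// the LAVA test job. PROJ_CONFIG is derived from the combination of
// BL2/NS/PSA_API/ISOLATION_LEVEL/TEST_REGRESSION/TEST_PSA_API/PROFILE.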
def generateLavaParam(build_params) {
  def params = []
  params += string(name: "TARGET_PLATFORM", \
                   value: mapPlatform[build_params["TFM_PLATFORM"]])
  params += string(name: "COMPILER", \
                   value: mapCompiler[build_params["TOOLCHAIN_FILE"]])
  params += string(name: "PSA_API_SUITE", \
                   value: mapTestPsaApi[build_params["TEST_PSA_API"]])

  if (build_params["BL2"] == "True" && \
      build_params["NS"] == "True" && \
      build_params["PSA_API"] == "False" && \
      build_params["ISOLATION_LEVEL"] == "1" && \
      build_params["TEST_REGRESSION"] == "False" && \
      build_params["TEST_PSA_API"] == "OFF" && \
      build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigDefault")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] == "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigCoreIPC")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "2" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] == "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigCoreIPCTfmLevel2")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "False" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["PROFILE"] == "profile_small" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] == "OFF") {
    params += string(name: "PROJ_CONFIG", value: "ConfigDefaultProfileS")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "2" && \
           build_params["PROFILE"] == "profile_medium" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] == "OFF") {
    params += string(name: "PROJ_CONFIG", value: "ConfigDefaultProfileM")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "False" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["TEST_REGRESSION"] == "True" && \
           build_params["TEST_PSA_API"] == "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigRegression")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["TEST_REGRESSION"] == "True" && \
           build_params["TEST_PSA_API"] == "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigRegressionIPC")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "2" && \
           build_params["TEST_REGRESSION"] == "True" && \
           build_params["TEST_PSA_API"] == "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigRegressionIPCTfmLevel2")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "False" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["PROFILE"] == "profile_small" && \
           build_params["TEST_REGRESSION"] == "True" && \
           build_params["TEST_PSA_API"] == "OFF") {
    params += string(name: "PROJ_CONFIG", value: "ConfigRegressionProfileS")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "2" && \
           build_params["PROFILE"] == "profile_medium" && \
           build_params["TEST_REGRESSION"] == "True" && \
           build_params["TEST_PSA_API"] == "OFF") {
    params += string(name: "PROJ_CONFIG", value: "ConfigRegressionProfileM")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "False" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] != "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigPsaApiTest")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "1" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] != "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigPsaApiTestIPC")
  }
  else if (build_params["BL2"] == "True" && \
           build_params["NS"] == "True" && \
           build_params["PSA_API"] == "True" && \
           build_params["ISOLATION_LEVEL"] == "2" && \
           build_params["TEST_REGRESSION"] == "False" && \
           build_params["TEST_PSA_API"] != "OFF" && \
           build_params["PROFILE"] == "N.A") {
    params += string(name: "PROJ_CONFIG", value: "ConfigPsaApiTestIPCTfmLevel2")
  }
  else {
    params += string(name: "PROJ_CONFIG", value: "ConfigDefault")
  }
  return params
}

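// Ask tf-m-ci-scripts/configs.py to expand the (space-separated) filter group
// list into individual build configuration names, appended to config_list.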
def listConfigs(ci_scripts_dir, config_list, filter_group) {
  dir(ci_scripts_dir) {
    echo "Obtaining list of configs."
    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}"
    def build_config_list_raw = sh(script: """\
python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}
""", returnStdout: true).trim()
    def build_config_list = build_config_list_raw.tokenize('\n')
    config_list.addAll(build_config_list)
  }
}

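// Fetch the full parameter set of a single build configuration from configs.py
// and return a closure, run later in the parallel "Builds" stage, that triggers
// the build job and, when the output is testable, a LAVA job.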
def buildConfig(ci_scripts_dir, config, filter_group, results) {
  def params = []
  def params_collection = [:]
  def build_config_params
  dir(ci_scripts_dir) {
    echo "Obtaining build configuration for config ${config}"
    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
    build_config_params = sh(script: """\
python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
""", returnStdout: true).trim()
  }
  def lines = build_config_params.tokenize('\n')
  for (String line : lines) {
    def key, value
    (key, value) = line.tokenize('=')
    params += string(name: key, value: value)
    params_collection[key] = value
  }
  params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
  params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
  params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
  params += string(name: 'CODE_REPO', value: env.CODE_REPO)
  params += string(name: 'CODE_COVERAGE_EN', value: env.CODE_COVERAGE_EN)
  params += string(name: 'TFM_TESTS_REFSPEC', value: env.TFM_TESTS_REFSPEC)
  params += string(name: 'CI_SCRIPTS_REFSPEC', value: env.CI_SCRIPTS_REFSPEC)
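  // The returned closure launches the build job, records its result, then
  // submits a LAVA test job unless the config built no NS image or disabled
  // protected storage.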
  return { ->
    def build_res = build(job: 'tf-m-build-config-infra-health', parameters: params, propagate: false)
    def build_info = [build_res, config, params_collection]
    results['builds'][build_res.number] = build_info
    def build_url = build_res.getAbsoluteUrl()
    print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
    def failure_states = ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]
    if (build_res.result in failure_states) {
      error("Build failed at ${build_url}")
    }
    else if (params_collection["NS"] == "False" ||
             params_collection["PARTITION_PS"] == "OFF") {
      print("LAVA is not needed for ${build_url}")
    }
    else {
      print("Doing LAVA stuff for ${build_url}")
      params += generateLavaParam(params_collection)
      params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
      params += string(name: 'BUILD_URL', value: build_url)
      params += string(name: 'LAVA_URL', value: env.LAVA_URL)
      params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
      params += string(name: 'LAVA_CREDENTIALS', value: env.LAVA_CREDENTIALS)
      def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
      if (lava_res.result in failure_states) {
        error("LAVA Create and Submit failed at ${lava_res.getAbsoluteUrl()}")
      }
      else {
        results['lava_jobs'] += lava_res.getDescription()
      }
    }
  }
}

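// Return a closure that triggers the docs build job and records its outcome
// in results['docs'].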
def buildDocs(results) {
  def params = []
  params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
  params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
  params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
  params += string(name: 'CODE_REPO', value: env.CODE_REPO)
  return { ->
    def res = build(job: 'tf-m-build-docs-infra-health', parameters: params, propagate: false)
    print("${res.number}: Docs ${res.result} ${res.getAbsoluteUrl()}")
    results['docs'] = [res.number, res.result, params]
    if (res.result in ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]) {
      error("Build failed at ${res.getAbsoluteUrl()}")
    }
  }
}

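// Send a failure email for the nightly job; skipped for other jobs and for
// successful stages.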
def emailNotification(success, stage) {
  script {
    if (env.JOB_NAME.equals("tf-m-nightly") && !env.EMAIL_NOTIFICATION.equals('')) {
      def result = "Fail."
      if (success) {
        result = "Success."
        print("Skip sending as ${result} for ${stage}")
      }
      else {
        emailext (
          subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} ${result}"),
          body: "Check console output at ${env.BUILD_URL}",
          to: "${EMAIL_NOTIFICATION}"
        )
      }
    }
  } /* script */
}

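// Flatten the per-build results into a CSV table: one row per
// platform/compiler/build-type/BL2 combination, one column per config.
// @NonCPS: this pure-Groovy data shuffling runs outside the CPS interpreter.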
@NonCPS
def generateCsvContent(results) {
  def resultsParam = []
  results.each { result ->
    resultsParam.add([result.value[1], \
                      result.value[0].getResult(), \
                      result.value[2]['TARGET_PLATFORM'], \
                      result.value[2]['COMPILER'], \
                      result.value[2]['PROJ_CONFIG'], \
                      result.value[2]['CMAKE_BUILD_TYPE'], \
                      result.value[2]['BL2'], \
                      result.value[2]['PSA_API_SUITE']])
  }
  def configs = [] as Set
  resultsParam.each { result ->
    if (result[2] == 'MUSCA_B1') {
      if (result[0].contains('_OTP_')) {
        result[2] += '_OTP'
      }
    }
    if (result[6] == 'True') {
      result[6] = 'BL2'
    }
    else {
      result[6] = 'NOBL2'
    }
    def config = result[4]
    if (result[7] != "''") {
      config += ' (' + result[7] + ') '
    }
    configs.add(config)
    result.add(config)
  }
  configs.sort()
  def csvContent = []
  resultsParam.each { result ->
    def configExists = false
    for (csvLine in csvContent) {
      if (csvLine[0] == result[2] && \
          csvLine[1] == result[3] && \
          csvLine[2] == result[5] && \
          csvLine[3] == result[6]) {
        csvLine[4][result[8]] = result[1]
        configExists = true
        break
      }
    }
    if (!configExists) {
      csvContent.add([result[2], result[3], result[5], result[6], [:]])
      csvContent.last()[4][result[8]] = result[1]
    }
  }
  csvContent.sort { a, b -> a[0] <=> b[0] ?: a[1] <=> b[1] ?: a[2] <=> b[2] ?: a[3] <=> b[3] }
  def csvTable = [['Platform', 'Compiler', 'Cmake Build Type', 'BL2']]
  csvTable[0] += configs
  def currentPlatform = ''
  def currentCompiler = ''
  def currentBuild = ''
  csvContent.each { csvLine ->
    // Blank out repeated platform/compiler/build-type cells for a cleaner layout
    if (currentPlatform == csvLine[0]) {
      csvTable.add([''])
    }
    else {
      csvTable.add([csvLine[0]])
      currentPlatform = csvLine[0]
      currentCompiler = ''
      currentBuild = ''
    }
    if (currentCompiler == csvLine[1]) {
      csvTable.last().add('')
    }
    else {
      csvTable.last().add(csvLine[1])
      currentCompiler = csvLine[1]
      currentBuild = ''
    }
    if (currentBuild == csvLine[2]) {
      csvTable.last().add('')
    }
    else {
      csvTable.last().add(csvLine[2])
      currentBuild = csvLine[2]
    }
    csvTable.last().add(csvLine[3])
    configs.each { config ->
      if (csvLine[4].containsKey(config)) {
        csvTable.last().add(csvLine[4][config])
      }
      else {
        csvTable.last().add('N/A')
      }
    }
  }
  return csvTable
}

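// Write the build-result matrix to build_results.csv and archive it on master.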
def generateBuildCsv(results) {
  def csvContent = generateCsvContent(results)
  node("master") {
    writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
    archiveArtifacts 'build_results.csv'
  }
}

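// Variant of generateBuildCsv() that builds the table via the shared Summary
// library; not called in this pipeline.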
def buildCsv(results) {
  def summary = new Summary()
  def csvContent = summary.getBuildCsv(results)
  node("master") {
    writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
    archiveArtifacts 'build_results.csv'
  }
}

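// Archive an HTML page of links to the individual build jobs.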
def writeSummary(results) {
  def summary = new Summary()
  def buildLinks = summary.getLinks(results)
  node("master") {
    writeFile file: "build_links.html", text: buildLinks
    archiveArtifacts 'build_links.html'
  }
}

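// Helpers for parsing lava_wait_jobs.py output: getResult() extracts the score
// (and any trailing failure text) from a line such as "BOOT_RESULT: 1".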
def lineInString(string, match) {
  def lines = string.split("\n")
  def result = lines.findAll { it.contains(match) }
  return result[0]
}

def getResult(string, match) {
  def line = lineInString(string, match)
  def a = line.split(match)[1].split(' ')
  def score = a[0]
  if (a.size() > 1) {
    def fail_text = a[1..-1].join(" ")
    return [score, fail_text]
  }
  return [score, ""]
}

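// Collect the LAVA job IDs reported on "JOBS: " lines in the submitted jobs'
// descriptions.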
def submitJobsToList(results) {
  def all_jobs = []
  for (String result : results) {
    def jobs_s = result.split('JOBS: ')
    if (jobs_s.size() > 1) {
      all_jobs += jobs_s[1]
    }
  }
  return all_jobs
}

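// Shared pipeline state: the configs to build, their build closures, and the
// build/LAVA results gathered across stages.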
def configs = []
def builds = [:]
def results = [:]

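// Stage 1: check out the CI scripts and turn each configuration selected by
// FILTER_GROUP into a build closure for the parallel stage below.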
node("docker-amd64-bionic") {
  stage("Init") {
    cleanWs()
    dir("tf-m-ci-scripts") {
      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
    }
  }
  stage("Configs") {
    // Populate configs
    listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
    results['builds'] = [:]
    results['lava_jobs'] = []
    for (config in configs) {
      builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
    }
    // builds["docs"] = buildDocs(results)  // Build Docs is not necessary in Infra-Health
  }
}

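// Stage 2: run every build closure in parallel, then report the overall status
// to Gerrit, email the nightly list on failure, and archive the CSV summary
// and build links.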
stage("Builds") {
  def verify = 1
  def success = true
  try {
    parallel(builds)
  } catch (Exception e) {
    print(e)
    manager.buildFailure()
    verify = -1
    success = false
  } finally {
    print("Verifying status")
    emailNotification(success, 'Build')
    def g = new Gerrit()
    g.verifyStatus(verify, 'tf-m-build', 'build')
    print("Building CSV")
    generateBuildCsv(results['builds'])
    writeSummary(results['builds'])
  }
}

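// Stage 3: optionally copy the docs build output, then wait for the LAVA jobs
// and translate boot/test results into Gerrit verify scores.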
node("docker-amd64-bionic") {
  stage("Copy Docs") {
    if (env.JOB_NAME.equals("tf-m-build-and-test")) {
      step([$class: 'CopyArtifact', projectName: 'tf-m-build-docs',
            selector: specific("${results['docs'][0]}"), target: './docs/',
            optional: true])
      archiveArtifacts artifacts: 'docs/**', allowEmptyArchive: true
    }
    else {
      print("No doc copy for job: ${env.JOB_NAME}")
    }
  }
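  // Wait for all submitted LAVA jobs, archive their artifacts, and fail the
  // run if any board failed to boot or any test suite failed.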
  stage("Tests") {
    dir("tf-m-ci-scripts") {
      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
    }
    def all_jobs = []
    def success = true
    print("Wait for LAVA results here...")
    try {
      all_jobs = submitJobsToList(results['lava_jobs'])
      if (all_jobs.size() > 0) {
        dir("tf-m-ci-scripts") {
          withCredentials([usernamePassword(credentialsId: env.LAVA_CREDENTIALS, passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
            def output = sh(script: """./lava_helper/lava_wait_jobs.py --job-ids ${all_jobs.join(",")} \
                            --lava-url ${env.LAVA_URL} --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} \
                            --artifacts-path lava_artifacts --lava-timeout 7200 \
                            """, returnStdout: true).trim()
            archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
            print(output)
            def g = new Gerrit()
            def (boot_result, boot_output) = getResult(output, 'BOOT_RESULT: ')
            if (boot_result) {
              g.verifyStatus(boot_result, "lava_boot", "test")
            }
            def (test_result, test_output) = getResult(output, 'TEST_RESULT: ')
            if (test_result) {
              g.verifyStatus(test_result, "lava_test", "test")
            }
            if (boot_result.toInteger() < 1 || test_result.toInteger() < 1) {
              error("Marking job as failed due to failed boots: ${boot_output} or tests: ${test_output}")
            }
          }
        }
      }
      else {
        print("There were no LAVA jobs to test.")
      }
    }
    catch (Exception e) {
      print("ERROR: ${e}")
      success = false
    } finally {
      archiveArtifacts artifacts: 'tf-m-ci-scripts/lava_artifacts/**', allowEmptyArchive: true
      emailNotification(success, 'Test')
      cleanWs()
      if (!success) {
        error("There was an error waiting for LAVA jobs")
      }
    }
  }
}