blob: 7769c0fe1dff29901865d8c13852c2f9e97f2650 [file] [log] [blame]
Xinyu Zhangf27f6032020-11-03 15:58:24 +08001#!/usr/bin/env groovy
2//-------------------------------------------------------------------------------
3// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
4//
5// SPDX-License-Identifier: BSD-3-Clause
6//
7//-------------------------------------------------------------------------------
8
9@Library('trustedfirmware') _
10import org.trustedfirmware.Gerrit
11import org.trustedfirmware.Summary
12
// Lookup tables used when deriving LAVA job parameters from build
// parameters (see generateLavaParam()). Assigned without 'def' on
// purpose: that stores them in the script binding so they are visible
// from the functions below.

// TF-M platform name (TFM_PLATFORM build param) -> LAVA TARGET_PLATFORM.
mapPlatform = ["cypress/psoc64": "psoc64",
               "mps2/an519": "AN519",
               "mps2/an521": "AN521",
               "mps2/an539": "AN539",
               "mps2/sse-200_aws": "SSE-200_AWS",
               "mps3/an524": "AN524",
               "musca_a": "MUSCA_A",
               "musca_b1": "MUSCA_B1",
               "musca_s1": "MUSCA_S1"]

// CMake toolchain file (TOOLCHAIN_FILE build param) -> LAVA COMPILER.
mapCompiler = ["toolchain_GNUARM.cmake": "GNUARM",
               "toolchain_ARMCLANG.cmake": "ARMCLANG"]

// BL2 build param -> build script flag. NOTE(review): appears unused in
// this file; presumably consumed by other pipelines sharing this library.
mapBL2 = ["True": "--bl2",
          "False": ""]

// TEST_PSA_API build param -> LAVA PSA_API_SUITE value ("" = no suite).
mapTestPsaApi = ["OFF": "",
                 "INTERNAL_TRUSTED_STORAGE": "ITS",
                 "PROTECTED_STORAGE": "PS",
                 "CRYPTO": "Crypto",
                 "INITIAL_ATTESTATION": "Attest",
                 "IPC": "FF"]
35
// Known build configurations. generateLavaParam() matches the first seven
// columns against the actual build parameters and uses the eighth
// (CONFIG_NAME, first word only) to compose the PROJ_CONFIG job parameter.
// BL2, NS, PSA_API, ISOLATION_LEVEL, TEST_REG, TEST_PSA_API, PROFILE, CONFIG_NAME
mapConfigs = [
    ["True", "True", "False", "1", "False", "OFF", "N.A", "Default"],
    ["True", "True", "True", "1", "False", "OFF", "N.A", "CoreIPC"],
    ["True", "True", "True", "2", "False", "OFF", "N.A", "CoreIPCTfmLevel2"],
    ["True", "True", "True", "3", "False", "OFF", "N.A", "CoreIPCTfmLevel3"],
    ["True", "True", "False", "1", "False", "OFF", "profile_small", "DefaultProfileS"],
    ["True", "True", "True", "2", "False", "OFF", "profile_medium", "DefaultProfileM"],
    ["True", "True", "False", "1", "True", "OFF", "N.A", "Regression"],
    ["True", "True", "True", "1", "True", "OFF", "N.A", "RegressionIPC"],
    ["True", "True", "True", "2", "True", "OFF", "N.A", "RegressionIPCTfmLevel2"],
    ["True", "True", "True", "3", "True", "OFF", "N.A", "RegressionIPCTfmLevel3"],
    ["True", "True", "False", "1", "True", "OFF", "profile_small", "RegressionProfileS"],
    ["True", "True", "True", "2", "True", "OFF", "profile_medium", "RegressionProfileM"],
    ["True", "True", "False", "1", "False", "INTERNAL_TRUSTED_STORAGE", "N.A", "PsaApiTest (ITS)"],
    ["True", "True", "False", "1", "False", "PROTECTED_STORAGE", "N.A", "PsaApiTest (PS)"],
    ["True", "True", "False", "1", "False", "CRYPTO", "N.A", "PsaApiTest (Crypto)"],
    ["True", "True", "False", "1", "False", "INITIAL_ATTESTATION", "N.A", "PsaApiTest (Attest)"],
    ["True", "True", "False", "1", "False", "IPC", "N.A", "PsaApiTest (FF)"],
    ["True", "True", "True", "1", "False", "INTERNAL_TRUSTED_STORAGE", "N.A", "PsaApiTestIPC (ITS)"],
    ["True", "True", "True", "1", "False", "PROTECTED_STORAGE", "N.A", "PsaApiTestIPC (PS)"],
    ["True", "True", "True", "1", "False", "CRYPTO", "N.A", "PsaApiTestIPC (Crypto)"],
    ["True", "True", "True", "1", "False", "INITIAL_ATTESTATION", "N.A", "PsaApiTestIPC (Attest)"],
    ["True", "True", "True", "1", "False", "IPC", "N.A", "PsaApiTestIPC (FF)"],
    ["True", "True", "True", "2", "False", "INTERNAL_TRUSTED_STORAGE", "N.A", "PsaApiTestIPCTfmLevel2 (ITS)"],
    ["True", "True", "True", "2", "False", "PROTECTED_STORAGE", "N.A", "PsaApiTestIPCTfmLevel2 (PS)"],
    ["True", "True", "True", "2", "False", "CRYPTO", "N.A", "PsaApiTestIPCTfmLevel2 (Crypto)"],
    ["True", "True", "True", "2", "False", "INITIAL_ATTESTATION", "N.A", "PsaApiTestIPCTfmLevel2 (Attest)"],
    ["True", "True", "True", "2", "False", "IPC", "N.A", "PsaApiTestIPCTfmLevel2 (FF)"],
    ["True", "True", "True", "3", "False", "INTERNAL_TRUSTED_STORAGE", "N.A", "PsaApiTestIPCTfmLevel3 (ITS)"],
    ["True", "True", "True", "3", "False", "PROTECTED_STORAGE", "N.A", "PsaApiTestIPCTfmLevel3 (PS)"],
    ["True", "True", "True", "3", "False", "CRYPTO", "N.A", "PsaApiTestIPCTfmLevel3 (Crypto)"],
    ["True", "True", "True", "3", "False", "INITIAL_ATTESTATION", "N.A", "PsaApiTestIPCTfmLevel3 (Attest)"],
    ["True", "True", "True", "3", "False", "IPC", "N.A", "PsaApiTestIPCTfmLevel3 (FF)"],
]
71
// Build the list of Jenkins job parameters for a LAVA submission out of
// the parameters of a completed build.
//
// build_params: map of build parameters (TFM_PLATFORM, TOOLCHAIN_FILE,
//               TEST_PSA_API, BL2, NS, PSA_API, ISOLATION_LEVEL,
//               TEST_REGRESSION, PROFILE).
// Returns: list of string() parameters: TARGET_PLATFORM, COMPILER,
//          PSA_API_SUITE and PROJ_CONFIG.
def generateLavaParam(build_params) {
    def params = []
    params += string(name: "TARGET_PLATFORM", \
                     value: mapPlatform[build_params["TFM_PLATFORM"]])
    params += string(name: "COMPILER", \
                     value: mapCompiler[build_params["TOOLCHAIN_FILE"]])
    params += string(name: "PSA_API_SUITE", \
                     value: mapTestPsaApi[build_params["TEST_PSA_API"]])

    // FIX: declare with 'def'. This function is called concurrently from
    // the parallel build closures; without 'def' these variables live in
    // the shared script binding and the branches race on them.
    def configName = "Config"
    def config_params = [build_params["BL2"], build_params["NS"], \
                         build_params["PSA_API"], build_params["ISOLATION_LEVEL"], \
                         build_params["TEST_REGRESSION"], build_params["TEST_PSA_API"], \
                         build_params["PROFILE"]]
    for (config in mapConfigs) {
        if (config_params == config[0..6]) {
            // Only the first word of CONFIG_NAME; drops " (SUITE)" suffixes.
            configName += config[7].split(' ')[0]
            break
        }
    }
    if (configName == "Config") {
        // No row matched; fall back to the default config name.
        configName = "ConfigDefault"
    }
    params += string(name: "PROJ_CONFIG", value: configName)

    return params
}
99
// Query configs.py for the build configs in the given filter group(s)
// (space-separated) and append their names to config_list.
def listConfigs(ci_scripts_dir, config_list, filter_group) {
    dir(ci_scripts_dir) {
        echo "Obtaining list of configs."
        // "a b" becomes "a -g b" so each group gets its own -g flag.
        def groupArgs = filter_group.replace(" ", " -g ")
        echo "Running: python3 ./configs.py -g ${groupArgs}"
        def rawList = sh(script: "python3 ./configs.py -g ${groupArgs}\n",
                         returnStdout: true).trim()
        // One config name per output line.
        rawList.tokenize('\n').each { config_list << it }
    }
}
111
// Obtain the build parameters for one config from configs.py and return a
// closure which, when executed as a parallel() branch, triggers the build
// job and then (when applicable) a LAVA test submission for it.
//
// ci_scripts_dir: directory holding the CI scripts checkout.
// config:         name of the build configuration.
// filter_group:   space-separated filter group names for configs.py.
// results:        shared map; builds are recorded under results['builds']
//                 and LAVA job descriptions under results['lava_jobs'].
def buildConfig(ci_scripts_dir, config, filter_group, results) {
    def params = []
    def params_collection = [:]
    def build_config_params
    dir(ci_scripts_dir) {
        echo "Obtaining build configuration for config ${config}"
        echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
        build_config_params = sh(script: """\
python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
""", returnStdout: true).trim()
    }
    // configs.py prints one KEY=VALUE pair per line.
    def lines = build_config_params.tokenize('\n')
    for (String line : lines) {
        def key, value
        (key, value) = line.tokenize('=')
        params += string(name: key, value: value)
        params_collection[key] = value
    }
    // Forward the Gerrit/CI context of this run to the downstream job.
    params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
    params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
    params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
    params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
    params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
    params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
    params += string(name: 'CODE_REPO', value: env.CODE_REPO)
    params += string(name: 'CODE_COVERAGE_EN', value: env.CODE_COVERAGE_EN)
    params += string(name: 'TFM_TESTS_REFSPEC', value: env.TFM_TESTS_REFSPEC)
    params += string(name: 'CI_SCRIPTS_REFSPEC', value: env.CI_SCRIPTS_REFSPEC)
    params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
    return { -> results
        def build_res = build(job: 'tf-m-build-config-infra-health', parameters: params, propagate: false)
        def build_info = [build_res, config, params_collection]
        results['builds'][build_res.number] = build_info
        def build_url = build_res.getAbsoluteUrl()
        print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
        // FIX: 'def' added. This closure runs in parallel with its
        // siblings; without 'def' the variable lives in the shared
        // script binding.
        def failure_states = ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]
        if (build_res.result in failure_states) {
            error("Build failed at ${build_url}")
        }
        else if (params_collection["NS"] == "False" ||
                 params_collection["PARTITION_PS"] == "OFF") {
            print("LAVA is not needed for ${build_url}")
        }
        else {
            print("Doing LAVA stuff for ${build_url}")
            params += generateLavaParam(params_collection)
            params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
            params += string(name: 'BUILD_URL', value: build_url)
            params += string(name: 'LAVA_URL', value: env.LAVA_URL)
            params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
            params += string(name: 'LAVA_CREDENTIALS', value: env.LAVA_CREDENTIALS)
            def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
            if (lava_res.result in failure_states) {
                error("LAVA Create and Submit failed at ${lava_res.getAbsoluteUrl()}")
            }
            else {
                // The submit job's description carries the "JOBS: <ids>"
                // string consumed later by submitJobsToList().
                results['lava_jobs'] += lava_res.getDescription()
            }
        }
    }
}
173
// Return a closure which triggers the documentation build job and records
// its outcome in results['docs'].
def buildDocs(results) {
    def params = []
    // Environment variables forwarded one-to-one to the docs job.
    def forwarded = ['GERRIT_BRANCH', 'GERRIT_HOST', 'GERRIT_CHANGE_NUMBER',
                     'GERRIT_PATCHSET_REVISION', 'GERRIT_REFSPEC',
                     'MBEDTLS_VERSION', 'CODE_REPO', 'CI_SCRIPTS_BRANCH']
    forwarded.each { name ->
        params += string(name: name, value: env."${name}")
    }
    return { -> results
        def res = build(job: 'tf-m-build-docs-infra-health', parameters: params, propagate: false)
        print("${res.number}: Docs ${res.result} ${res.getAbsoluteUrl()}")
        results['docs'] = [res.number, res.result, params]
        if (res.result in ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]) {
            error("Build failed at ${res.getAbsoluteUrl()}")
        }
    }
}
193
// Send a notification e-mail when a stage of the nightly job fails.
// Nothing is sent for other jobs, when no recipient is configured, or
// when the stage succeeded (results == true).
def emailNotification(results, stage) {
    script {
        // Only the nightly job with a configured recipient notifies.
        if (!env.JOB_NAME.equals("tf-m-nightly") || env.EMAIL_NOTIFICATION.equals('')) {
            return
        }
        def result = (results == true) ? "Success." : "Fail."
        if (results == true) {
            print("Skip sending as ${result} for ${stage}")
            return
        }
        emailext (
            subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} ${result}"),
            body: "Check console output at ${env.BUILD_URL}",
            to: "${EMAIL_NOTIFICATION}"
        )
    } /* script */
}
212
// Transform results['builds'] (build number -> [build, config name,
// params map]) into a table for writeCSV: a header row of config names,
// then one row per (platform, compiler, cmake build type, BL2)
// combination, with each cell holding that config's build result or 'N/A'.
// @NonCPS: plain Groovy iteration over non-serializable build objects.
@NonCPS
def generateCsvContent(results) {
    // Flatten each entry into:
    // [config, result, platform, compiler, proj_config, build_type, bl2, psa_suite]
    def resultsParam = []
    results.each { result ->
        resultsParam.add([result.value[1], \
                          result.value[0].getResult(), \
                          result.value[2]['TARGET_PLATFORM'], \
                          result.value[2]['COMPILER'], \
                          result.value[2]['PROJ_CONFIG'], \
                          result.value[2]['CMAKE_BUILD_TYPE'], \
                          result.value[2]['BL2'], \
                          result.value[2]['PSA_API_SUITE']])
    }
    def configs = [] as Set
    resultsParam.each { result ->
        // OTP-enabled MUSCA_B1 configs are reported as a separate platform.
        if (result[2] == 'MUSCA_B1') {
            if (result[0].contains('_OTP_')) {
                result[2] += '_OTP'
            }
        }
        if (result[6] == 'True') {
            result[6] = 'BL2'
        }
        else {
            result[6] = 'NOBL2'
        }
        // FIX: 'def' added; an undeclared variable would be stored in the
        // shared script binding instead of staying local.
        def config = result[4]
        if (result[7] != "''") {
            config += ' (' + result[7] + ') '
        }
        configs.add(config)
        result.add(config)   // result[8] = column name for this row
    }
    // FIX: sort() on a Set returns a new sorted List without mutating the
    // set, so the result must be assigned back; previously the sorted
    // order was silently discarded and the columns stayed unsorted.
    configs = configs.sort()
    def csvContent = []
    resultsParam.each { result ->
        def configExists = false
        // Merge rows sharing platform/compiler/build type/BL2 into one line.
        for (csvLine in csvContent) {
            if (csvLine[0] == result[2] && \
                csvLine[1] == result[3] && \
                csvLine[2] == result[5] && \
                csvLine[3] == result[6]) {
                csvLine[4][result[8]] = result[1]
                configExists = true
                break
            }
        }
        if (!configExists) {
            csvContent.add([result[2], result[3], result[5], result[6], [:]])
            csvContent.last()[4][result[8]] = result[1]
        }
    }
    csvContent.sort{a,b -> a[0] <=> b[0] ?: a[1] <=> b[1] ?: a[2] <=> b[2] ?: a[3] <=> b[3]}
    def csvTable = [['Platform', 'Compiler', 'Cmake Build Type', 'BL2']]
    csvTable[0] += configs
    // Blank out repeated platform/compiler/build-type cells so the CSV
    // reads as grouped sections.
    def currentPlatform = ''
    def currentCompiler = ''
    def currentBuild = ''
    csvContent.each { csvLine ->
        // Modify CSV output format for a better layout
        if (currentPlatform == csvLine[0]) {
            csvTable.add([''])
        }
        else {
            csvTable.add([csvLine[0]])
            currentPlatform = csvLine[0]
            currentCompiler = ''
            currentBuild = ''
        }
        if (currentCompiler == csvLine[1]) {
            csvTable.last().add('')
        }
        else {
            csvTable.last().add(csvLine[1])
            currentCompiler = csvLine[1]
            currentBuild = ''
        }
        if (currentBuild == csvLine[2]) {
            csvTable.last().add('')
        }
        else {
            csvTable.last().add(csvLine[2])
            currentBuild = csvLine[2]
        }
        csvTable.last().add(csvLine[3])
        configs.each { config ->
            if (csvLine[4].containsKey(config)) {
                csvTable.last().add(csvLine[4][config])
            }
            else {
                csvTable.last().add('N/A')
            }
        }
    }
    return csvTable
}
309
// Render the build results into build_results.csv and archive it.
def generateBuildCsv(results) {
    def records = generateCsvContent(results)
    node("master") {
        writeCSV file: 'build_results.csv', records: records, format: CSVFormat.EXCEL
        archiveArtifacts 'build_results.csv'
    }
}
317
// Render the build results into build_results.csv via the shared
// Summary library helper and archive it.
def buildCsv(results) {
    def records = new Summary().getBuildCsv(results)
    node("master") {
        writeCSV file: 'build_results.csv', records: records, format: CSVFormat.EXCEL
        archiveArtifacts 'build_results.csv'
    }
}
326
// Render per-build links as HTML and archive them as build_links.html.
def writeSummary(results) {
    def html = new Summary().getLinks(results)
    node("master") {
        writeFile file: "build_links.html", text: html
        archiveArtifacts 'build_links.html'
    }
}
335
// Return the first line of 'string' that contains 'match', or null when
// no line matches.
def lineInString(string, match) {
    return string.split("\n").find { it.contains(match) }
}
341
// Extract "<score> [fail text]" following the 'match' marker on its line.
//
// string: multi-line text (e.g. the LAVA wait-script output).
// match:  marker to look for, e.g. 'BOOT_RESULT: '.
// Returns: [score, fail_text]; fail_text is "" when nothing follows the
//          score on that line.
def getResult(string, match) {
    // FIX: 'def' added throughout; previously these undeclared variables
    // leaked into the shared script binding.
    def line = lineInString(string, match)
    def a = line.split(match)[1].split(' ')
    def score = a[0]
    if (a.size() > 1)
    {
        def fail_text = a[1..-1].join(" ")
        return [score, fail_text]
    }
    return [score, ""]
}
353
// Collect LAVA job id strings from the submit-job descriptions.
// Every description containing "JOBS: <ids>" contributes its <ids> part;
// descriptions without the marker are skipped.
def submitJobsToList(results) {
    def all_jobs = []
    for (String result : results){
        // FIX: 'def' added; an undeclared variable would be stored in the
        // shared script binding.
        def jobs_s = result.split('JOBS: ')
        if (jobs_s.size() > 1) {
            all_jobs += jobs_s[1]
        }
    }
    return(all_jobs)
}
364
// Shared pipeline state, filled in by the stages below:
// configs - config names selected by FILTER_GROUP
// builds  - parallel branch closures, keyed by config name
// results - 'builds' and 'lava_jobs' entries populated by the branches
def configs = []
def builds = [:]
def results = [:]
368
// Prepare the workspace and schedule one parallel branch per build config.
node("docker-amd64-tf-m-bionic") {
    stage("Init") {
        cleanWs()
        dir("tf-m-ci-scripts") {
            // Single-quoted so Groovy does not interpolate; presumably the
            // SCM plugin expands $CI_SCRIPTS_BRANCH/$CI_SCRIPTS_REPO — TODO confirm.
            checkout([$class: 'GitSCM', branches: [[name: '$CI_SCRIPTS_BRANCH']], userRemoteConfigs: [[credentialsId: 'GIT_SSH_KEY', url: '$CI_SCRIPTS_REPO']]])
        }
    }
    stage("Configs") {
        // Populate configs
        listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
        results['builds'] = [:]
        results['lava_jobs'] = []
        // One closure per config; executed later by parallel(builds).
        for (config in configs) {
            builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
        }
        // builds["docs"] = buildDocs(results) # Build Docs is not necessary in Infra-Health
    }
}
387
// Run all build branches in parallel, then report the outcome: e-mail
// notification, Gerrit verify score, CSV summary and per-build links.
stage("Builds") {
    def verify = 1
    def success = true
    try {
        parallel(builds)
    } catch (Exception e) {
        print(e)
        manager.buildFailure()
        verify = -1
        success = false
    } finally {
        // Reporting runs whether the builds passed or failed.
        print("Verifying status")
        emailNotification(success, 'Build')
        // FIX: 'def' added; previously 'g' leaked into the script binding.
        def g = new Gerrit()
        g.verifyStatus(verify, 'tf-m-build', 'build')
        print("Building CSV")
        generateBuildCsv(results['builds'])
        writeSummary(results['builds'])
    }
}
408
// Copy docs artifacts (build-and-test job only), then wait for the LAVA
// jobs submitted by the build branches and report their results.
node("docker-amd64-tf-m-bionic") {
    stage("Copy Docs") {
        // NOTE(review): buildDocs() is commented out in the Configs stage,
        // so results['docs'] is only set in pipelines that schedule it;
        // this JOB_NAME guard keeps other jobs from dereferencing it.
        if (env.JOB_NAME.equals("tf-m-build-and-test")) {
            step([$class: 'CopyArtifact', projectName: 'tf-m-build-docs',
                selector: specific("${results['docs'][0]}"), target: './docs/',
                optional: true])
            archiveArtifacts artifacts: 'docs/**', allowEmptyArchive: true
        }
        else {
            print("No doc copy for job: ${env.JOB_NAME}")
        }
    }
    stage("Tests") {
        dir("tf-m-ci-scripts") {
            checkout([$class: 'GitSCM', branches: [[name: '$CI_SCRIPTS_BRANCH']], userRemoteConfigs: [[credentialsId: 'GIT_SSH_KEY', url: '$CI_SCRIPTS_REPO']]])
        }
        def all_jobs = []
        def success = true
        print("Wait for LAVA results here...")
        try {
            all_jobs = submitJobsToList(results['lava_jobs'])
            if (all_jobs.size() > 0) {
                dir("tf-m-ci-scripts") {
                    withCredentials([usernamePassword(credentialsId: env.LAVA_CREDENTIALS, passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
                        // FIX: 'def' added; previously 'output' and 'g'
                        // leaked into the shared script binding.
                        def output = sh(script: """./lava_helper/lava_wait_jobs.py --job-ids ${all_jobs.join(",")} \
                        --lava-url ${env.LAVA_URL} --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} \
                        --artifacts-path lava_artifacts --lava-timeout 7200 \
                        """, returnStdout: true).trim()
                        archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
                        print(output)
                        def g = new Gerrit()
                        // The wait script prints "BOOT_RESULT: <score> ..."
                        // and "TEST_RESULT: <score> ..." lines.
                        def (boot_result, boot_output) = getResult(output, 'BOOT_RESULT: ')
                        if (boot_result) {
                            g.verifyStatus(boot_result, "lava_boot", "test")
                        }
                        def (test_result, test_output) = getResult(output, 'TEST_RESULT: ')
                        if (test_result) {
                            g.verifyStatus(test_result, "lava_test", "test")
                        }
                        // A non-positive score from either marker fails the job.
                        if (boot_result.toInteger() < 1 || test_result.toInteger() < 1) {
                            error("Marking job as failed due to failed boots: ${boot_output} or tests: ${test_output}")
                        }
                    }
                }
            }
            else {
                print("There were no LAVA jobs to test.")
            }
        }
        catch (Exception e) {
            print("ERROR: ${e}")
            success = false
        } finally {
            archiveArtifacts artifacts: 'tf-m-ci-scripts/lava_artifacts/**', allowEmptyArchive: true
            emailNotification(success, 'Test')
            cleanWs()
            if (!success) {
                error("There was an Error waiting for LAVA jobs")
            }
        }
    }
}