#!/usr/bin/env groovy
//-------------------------------------------------------------------------------
// Copyright (c) 2020-2022, Arm Limited and Contributors. All rights reserved.
//
// SPDX-License-Identifier: BSD-3-Clause
//
//-------------------------------------------------------------------------------
@Library('trustedfirmware') _
import org.trustedfirmware.Gerrit
import org.trustedfirmware.Summary
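// Jenkins build results that this pipeline treats as failures.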
failure_states = ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]
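// Build configs that are skipped on FVP and run only on physical boards
// (see the DEVICE_FILTER handling in submit_lava_tests()).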
cfgSkipFVP = [
  "AN519_GCC_IPC_2_REG_Debug_BL2",
  "AN519_GCC_IPC_2_REG_Debug_BL2_MEDIUM",
  "AN519_GCC_IPC_2_REG_Debug_BL2_MEDIUM_PSOFF",
  "AN519_ARMCLANG_IPC_2_REG_Debug_BL2",
  "AN519_ARMCLANG_IPC_2_REG_Debug_BL2_MEDIUM",
  "AN521_ARMCLANG_IPC_2_REG_Debug_BL2",
  "AN521_ARMCLANG_IPC_2_REG_Debug_BL2_NSCE",
  "AN521_ARMCLANG_IPC_2_REG_Debug_BL2_MEDIUM",
]
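// Submit LAVA test jobs for a finished build via the tf-m-lava-submit job and
// record the returned LAVA job description in results['lava_jobs'].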
def submit_lava_tests(config, results, build_res, params, params_collection) {
  print("Submitting LAVA tests for ${build_res.getAbsoluteUrl()}")
  params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
  params += string(name: 'BUILD_URL', value: build_res.getAbsoluteUrl())
  params += string(name: 'LAVA_URL', value: env.LAVA_URL)
  params += string(name: 'CI_SCRIPTS_REPO', value: env.CI_SCRIPTS_REPO)
  params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
  params += string(name: 'LAVA_CREDENTIALS', value: env.LAVA_CREDENTIALS)
  params += string(name: 'CODE_COVERAGE_EN', value: env.CODE_COVERAGE_EN)
  // Workaround: configs in cfgSkipFVP fail on FVP but pass on physical
  // boards, so restrict them to physical devices.
  if (params_collection['CONFIG_NAME'] in cfgSkipFVP) {
    params += string(name: 'DEVICE_FILTER', value: "--physical-board-only")
  } else {
    params += string(name: 'DEVICE_FILTER', value: env.DEVICE_FILTER)
  }
  def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
  def lava_resubmitted = false
  if (lava_res.result in failure_states) {
    error("LAVA Create and Submit failed at ${lava_res.getAbsoluteUrl()}")
  } else {
    lava_des = lava_res.getDescription()
    if (lava_des.contains(" Submitted twice!")) {
      lava_resubmitted = true
      lava_des = lava_des - " Submitted twice!"
    }
    results['lava_jobs'] += lava_des
  }
  links = "Build Config: ${config}\n"
  links += "Build URL: ${build_res.getAbsoluteUrl()}\n"
  links += "LAVA Submit: ${lava_res.getAbsoluteUrl()}"
  if (lava_resubmitted) {
    links += "\nLAVA Job Re-Submitted!"
  }
  print(links)
}
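// Run configs.py to enumerate the build configs of the given filter group(s)
// and append them to config_list.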
def listConfigs(ci_scripts_dir, config_list, filter_group) {
  dir(ci_scripts_dir) {
    echo "Obtaining list of configs."
    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}"
    def build_config_list_raw = sh(script: """\
      python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}
      """, returnStdout: true).trim()
    def build_config_list = build_config_list_raw.tokenize('\n')
    config_list.addAll(build_config_list)
  }
}
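// Resolve the full parameter set for a single build config and return a
// closure that triggers the tf-m-build-config job and, on success, submits
// LAVA tests. The closures are run in parallel by the "Builds" stage.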
def buildConfig(ci_scripts_dir, config, filter_group, results) {
  def params = []
  def params_collection = [:]
  def build_config_params
  dir(ci_scripts_dir) {
    echo "Obtaining build configuration for config ${config}"
    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
    build_config_params = sh(script: """\
      python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
      """, returnStdout: true).trim()
  }
  def lines = build_config_params.tokenize('\n')
  for (String line : lines) {
    def key, value
    (key, value) = line.tokenize('=')
    params += string(name: key, value: value)
    params_collection[key] = value
  }
  params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
  params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
  params += string(name: 'CODE_REPO', value: env.CODE_REPO)
  params += string(name: 'CODE_COVERAGE_EN', value: env.CODE_COVERAGE_EN)
  params += string(name: 'CI_SCRIPTS_REPO', value: env.CI_SCRIPTS_REPO)
  params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
  params += string(name: 'MCUBOOT_REFSPEC', value: env.MCUBOOT_REFSPEC)
  params += string(name: 'MCUBOOT_URL', value: env.MCUBOOT_URL)
  params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
  params += string(name: 'MBEDTLS_URL', value: env.MBEDTLS_URL)
  params += string(name: 'TFM_TESTS_REFSPEC', value: env.TFM_TESTS_REFSPEC)
  params += string(name: 'TFM_TESTS_URL', value: env.TFM_TESTS_URL)
  params += string(name: 'PSA_ARCH_TESTS_VERSION', value: env.PSA_ARCH_TESTS_VERSION)
  params += string(name: 'PSA_ARCH_TESTS_URL', value: env.PSA_ARCH_TESTS_URL)
  params += string(name: 'SHARE_FOLDER', value: env.SHARE_FOLDER)
  if (env.JOB_NAME.equals("tf-m-nightly")) { // Enable memory footprint gathering.
    params += string(name: 'SQUAD_CONFIGURATIONS', value: env.SQUAD_CONFIGURATIONS)
  }
  return { ->
    def build_res = build(job: 'tf-m-build-config', parameters: params, propagate: false)
    def build_url = build_res.getAbsoluteUrl()
    results['builds'][build_res.number] = [build_res, config, params_collection]
    print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
    // Configs that failed to build do not need LAVA tests.
    if (build_res.result in failure_states) {
      error("Build failed at ${build_url}")
    } else {
      // Build successful. The tf-m-extra-build job does not need LAVA tests.
      if (env.JOB_NAME.equals("tf-m-extra-build")) {
        print("LAVA is not needed in tf-m-extra-build job.")
      } else {
        submit_lava_tests(config, results, build_res, params, params_collection)
      }
    }
  }
}
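// Return a closure that triggers the tf-m-build-docs job and records its
// result in results['docs'].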
def buildDocs(results) {
  def params = []
  params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
  params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
  params += string(name: 'CODE_REPO', value: env.CODE_REPO)
  params += string(name: 'CI_SCRIPTS_REPO', value: env.CI_SCRIPTS_REPO)
  params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
  params += string(name: 'SHARE_FOLDER', value: env.SHARE_FOLDER)
  return { ->
    def res = build(job: 'tf-m-build-docs', parameters: params, propagate: false)
    print("${res.number}: Docs ${res.result} ${res.getAbsoluteUrl()}")
    results['docs'] = [res.number, res.result, params]
    if (res.result in failure_states) {
      error("Build failed at ${res.getAbsoluteUrl()}")
    }
  }
}
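// Compose the notification email body: console link, failed jobs, and a
// pointer to the archived <stage>_results.csv artifact.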
def generateEmailBody(stage, failed_jobs) {
  body = "Check console output at ${env.BUILD_URL} \n\n"
  body += "Failed Jobs:\n"
  failed_jobs.each { job ->
    body += "${job.key} ${job.value}\n"
  }
  body += "\nFor detailed ${stage} results please refer to "
  body += "${env.BUILD_URL}artifact/${stage}_results.csv \n"
  return body
}
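// Send a failure notification email for the given stage, if EMAIL_NOTIFICATION
// is configured. Nothing is sent when the stage succeeded.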
def emailNotification(success, stage, failed_jobs) {
  script {
    if (env.EMAIL_NOTIFICATION) {
      def result = "Fail."
      if (success == true) {
        result = "Success."
        print("Skipping email notification: ${stage} stage result is ${result}")
      } else {
        emailext (
          subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} ${result}"),
          body: generateEmailBody(stage, failed_jobs),
          to: "${EMAIL_NOTIFICATION}"
        )
      }
    }
  } /* script */
}
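// Map config name -> build URL for every build that ended in FAILURE.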
def filterFailedBuild(results) {
  def failed_builds = [:]
  results.each { result ->
    if (result.value[0].getResult() == "FAILURE") {
      failed_builds[result.value[1]] = result.value[0].getAbsoluteUrl()
    }
  }
  return failed_builds
}
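// Post the LAVA test verdict to Gerrit, fail the job if the output contains
// any FAILURE, and parse the per-config test metadata into a map.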
def parseTestResults(output) {
  // Verify test status
  g = new Gerrit()
  if (output.contains('FAILURE')) {
    score = -1
  } else {
    score = 1
  }
  g.verifyStatus(score, "lava_test", "test")
  if (score < 0) {
    error("Marking job as failed due to FAILURE entries in the LAVA boot/test output.")
  }
  // Generate test results summary
  def test_results = [:]
  records = output.split('\nLAVA Test Config:\n')
  if (records.size() < 2) {
    return test_results
  }
  records[1..-1].each { record ->
    config_name = ""
    metadata = [:]
    record.split('\n').each { line ->
      record_metadata = line.split(': ')
      if (record_metadata[0] == 'Config Name') {
        config_name = record_metadata[1]
      } else {
        metadata[record_metadata[0]] = record_metadata[1]
      }
    }
    test_results[config_name] = metadata
  }
  return test_results
}
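// Parse the "FAILURE_TESTS:" line of the LAVA output into a map of
// config name -> result link.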
def filterFailedTest(string) {
  def failed_tests = [:]
  line = lineInString(string, "FAILURE_TESTS:")
  if (line == null) {
    // No FAILURE_TESTS line found; return a placeholder entry.
    return ["???": ""]
  }
  a = line.split(' ')
  if (a.size() > 1) {
    a = a[1..-1]
    a.each { fail_test ->
      config_link = fail_test.split(':')
      failed_tests[config_link[0]] = config_link[1..-1].join(':')
    }
  }
  return failed_tests
}
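// Build the rows of build_results.csv: one row per platform/compiler/build
// type, one column per test config. Relies on the script-level globals
// mapConfigs and cfgs, which are expected to be defined elsewhere in this
// repository's CI scripts.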
@NonCPS
def generateCsvContent(results) {
  // Collect the parameters of each BL2-enabled build into a flat list.
  def resultsParam = []
  results.each { result ->
    if (result.value[2]['BL2'] == "True") {
      resultsParam.add([result.value[1],
                        result.value[0].getResult(),
                        result.value[2]['TFM_PLATFORM'],
                        result.value[2]['COMPILER'].split('_')[0],
                        result.value[2]['CMAKE_BUILD_TYPE'],
                        result.value[2]['BL2'],
                        result.value[2]['LIB_MODEL'],
                        result.value[2]['ISOLATION_LEVEL'],
                        result.value[2]['TEST_REGRESSION'],
                        result.value[2]['TEST_PSA_API'],
                        result.value[2]['PROFILE']])
    }
  }
  // Map each build's parameter combination to a named test config.
  resultsParam.each { result ->
    result[3] = result[3].split('_')[0]
    build_params = result[6..10]
    configName = ""
    for (map_cfg in mapConfigs) {
      if (build_params[0..4] == map_cfg[0..4]) {
        configName = map_cfg[5]
        break
      }
    }
    if (configName == "") {
      configName = "Default"
    } else if (configName == "RegressionProfileM") {
      if (build_params[5] == "OFF") {
        configName = "RegressionProfileM PSOFF"
      }
    }
    result.add(configName)
  }
  def csvContent = []
  resultsParam.each { result ->
    current_row = result[2..4]
    cfgs.each { cfg ->
      if (cfg == result[11]) {
        current_row.add(cfg)
        current_row.add(result[1])
      }
    }
    csvContent.add(current_row)
  }
  csvContent.sort { a, b -> a[0] <=> b[0] ?: a[1] <=> b[1] ?: a[2] <=> b[2] }
  // Summarize: one row per platform/compiler/build type, "N.A." for configs
  // that were not built.
  build_summary = []
  current_platform = ""
  current_compiler = ""
  current_build_type = ""
  csvContent.each { build_cfg ->
    if (current_platform != build_cfg[0] ||
        current_compiler != build_cfg[1] ||
        current_build_type != build_cfg[2]) {
      current_platform = build_cfg[0]
      current_compiler = build_cfg[1]
      current_build_type = build_cfg[2]
      csv_line = [current_platform, current_compiler, current_build_type]
      cfgs.each {
        csv_line.add("N.A.")
      }
      build_summary.add(csv_line)
    }
    i = 0
    cfgs.each { cfg ->
      if (cfg == build_cfg[3]) {
        build_summary[-1][3+i] = build_cfg[4]
      }
      i += 1
    }
  }
  build_summary.add(0, ['Platform', 'Compiler', 'Cmake Build Type'])
  build_summary[0] += cfgs
  return build_summary
}
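// Write the build summary to build_results.csv on the master node and
// archive it.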
def generateBuildCsv(results) {
  def csvContent = generateCsvContent(results)
  node("master") {
    writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
    archiveArtifacts 'build_results.csv'
  }
}
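// Alternative CSV generation based on the shared Summary library.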
def buildCsv(results) {
  def summary = new Summary();
  def csvContent = summary.getBuildCsv(results)
  node("master") {
    writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
    archiveArtifacts 'build_results.csv'
  }
}
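// Generate build_links.html from the build results and archive it.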
def writeSummary(results) {
  def summary = new Summary();
  def buildLinks = summary.getLinks(results)
  node("master") {
    writeFile file: "build_links.html", text: buildLinks
    archiveArtifacts 'build_links.html'
  }
}
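// Return the first line of `string` containing `match`, or null if none does.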
def lineInString(string, match) {
  def lines = string.split("\n")
  def result = lines.findAll { it.contains(match) }
  return result[0]
}
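// Split the line containing `match` into a score and optional failure text.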
def getResult(string, match) {
  line = lineInString(string, match)
  a = line.split(match)[1].split(' ')
  score = a[0]
  if (a.size() > 1) {
    fail_text = a[1..-1].join(" ")
    return [score, fail_text]
  }
  return [score, ""]
}
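// Extract the LAVA job IDs from the "JOBS: ..." entries of the submit results.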
def submitJobsToList(results) {
  def all_jobs = []
  for (String result : results) {
    jobs_s = result.split('JOBS: ')
    if (jobs_s.size() > 1) {
      all_jobs += jobs_s[1]
    }
  }
  return all_jobs
}
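// Pipeline entry point: build all configs of FILTER_GROUP in parallel, then
// wait for the corresponding LAVA test jobs and publish the results.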
def configs = []
def builds = [:]
def results = [:]
timestamps {
  node("docker-amd64-tf-m-bionic") {
    stage("Init") {
      cleanWs()
      dir("tf-m-ci-scripts") {
        checkout([$class: 'GitSCM', branches: [[name: '$CI_SCRIPTS_BRANCH']], userRemoteConfigs: [[credentialsId: 'GIT_SSH_KEY', url: '$CI_SCRIPTS_REPO']]])
        sh "git rev-parse --short HEAD"
        // Clone TF-M repositories so the share folder can be reused by downstream jobs
        sh "./clone.sh"
      }
    }
    stage("Configs") {
      // Populate the list of build configs for the current filter group
      listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
      results['builds'] = [:]
      results['lava_jobs'] = []
      for (config in configs) {
        builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
      }
      if (!env.JOB_NAME.equals("tf-m-extra-build")) {
        builds["docs"] = buildDocs(results)
      }
    }
stage("Builds") {
def verify = 1
def success = true
try {
parallel(builds)
} catch (Exception e) {
print(e)
manager.buildFailure()
verify = -1
success = false
} finally {
print("Verifying status")
def failed_builds = filterFailedBuild(results['builds'])
emailNotification(success, 'build', failed_builds)
g = new Gerrit()
g.verifyStatus(verify, 'tf-m-build', 'build')
print("Building CSV")
generateBuildCsv(results['builds'])
writeSummary(results['builds'])
}
}
stage("Tests") {
dir("tf-m-ci-scripts") {
checkout([$class: 'GitSCM', branches: [[name: '$CI_SCRIPTS_BRANCH']], userRemoteConfigs: [[credentialsId: 'GIT_SSH_KEY', url: '$CI_SCRIPTS_REPO']]])
}
def all_jobs = []
def success = true
print("Wait for LAVA results here...")
try {
all_jobs = submitJobsToList(results['lava_jobs'])
output = ""
if (all_jobs.size() > 0) {
dir(".") {
withCredentials([usernamePassword(credentialsId: env.LAVA_CREDENTIALS, passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
output = sh(script: """./tf-m-ci-scripts/lava_helper/lava_wait_jobs.py --job-ids ${all_jobs.join(",")} \
--lava-url ${env.LAVA_URL} --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} \
--artifacts-path cfgs --lava-timeout 12000 \
""", returnStdout: true).trim()
println("--- output from lava_wait_jobs.py ---")
println(output)
println("--- end of output from lava_wait_jobs.py ---")
parseTestResults(output)
archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
archiveArtifacts artifacts: 'test_results.csv', allowEmptyArchive: true
if (env.CODE_COVERAGE_EN == "TRUE") {
println("Producing merged report")
sh(script: """./tf-m-ci-scripts/lava_helper/codecov_merge.sh""")
archiveArtifacts artifacts: 'merged_report/**', allowEmptyArchive: true
}
}
}
}
else {
print("There were no LAVA jobs to test.")
}
}
catch (Exception e) {
print("ERROR: ${e}")
success = false
} finally {
archiveArtifacts artifacts: 'cfgs/**', allowEmptyArchive: true
if (all_jobs.size() > 0) {
emailNotification(success, 'test', filterFailedTest(output))
}
cleanWs()
if (!success) {
error("There was an Error waiting for LAVA jobs")
}
}
}
}
}