Various CI fixes
* Output CSV on build stage
* Check more failure states in pipeline
* Allow configs.py to use multiple groups
* Add build log as artifact
* Add links to particular build configs
* Host CMSIS_5 pack file internally
* Adding mbedcrypto url as a param
* Move the LAVA job generation into a new jenkins job
* Make job_ids strings for adding to job description
Change-Id: I801a1a5d15a7f55e25477ad371e8ec59eb14fd7f
Signed-off-by: Dean Birch <dean.birch@arm.com>
diff --git a/configs.py b/configs.py
index 3b5bc7d..32283d9 100755
--- a/configs.py
+++ b/configs.py
@@ -41,13 +41,13 @@
def list_configs(group):
"""Lists available configurations"""
build_manager = get_build_manager(group)
- build_manager.print_config()
+ return build_manager.get_config()
-def print_config_environment(config, group=None):
+def print_config_environment(config, group=None, silence_stderr=False):
"""Prints particular configuration environment variables"""
build_manager = get_build_manager(group)
- build_manager.print_config_environment(config)
+ build_manager.print_config_environment(config, silence_stderr=silence_stderr)
if __name__ == "__main__":
@@ -63,14 +63,29 @@
PARSER.add_argument(
"-g",
"--group",
- default=None,
+ default=[],
+ action="append",
help="Only list configurations under a certain group. ",
choices=list(_builtin_configs.keys()),
)
ARGS = PARSER.parse_args()
- # By default print available configs
- if not ARGS.config:
- list_configs(ARGS.group)
- else:
- print_config_environment(ARGS.config, group=ARGS.group)
+ all_configs = set()
+ for group in ARGS.group:
+ # By default print available configs
+ if not ARGS.config:
+ all_configs.update(list_configs(group))
+ else:
+ try:
+ print_config_environment(ARGS.config, group=group, silence_stderr=True)
+ break
+ except SystemExit:
+ if group == ARGS.group[-1]:
+ print(
+ "Could not find configuration {} in groups {}".format(
+ ARGS.config, ARGS.group
+ )
+ )
+
+ for config in all_configs:
+ print(config)
diff --git a/jenkins/build-config.jpl b/jenkins/build-config.jpl
index d258dbd..aea0266 100644
--- a/jenkins/build-config.jpl
+++ b/jenkins/build-config.jpl
@@ -34,21 +34,23 @@
branches: [[name: 'FETCH_HEAD']],
userRemoteConfigs: [[
refspec: 'refs/tags/$MBEDCRYPTO_VERSION',
- url: 'https://github.com/ARMmbed/mbed-crypto.git'
+ url: params.MBEDCRYPTO_URL
]]
]
)
}
sh """
-wget -O cmsis.pack -q https://github.com/ARM-software/CMSIS_5/releases/download/${CMSIS_VERSION}/ARM.CMSIS.${CMSIS_VERSION}.pack
+# Host https://github.com/ARM-software/CMSIS_5/releases/download/5.5.0/ARM.CMSIS.5.5.0.pack
+# under \$JENKINS_HOME/userContent
+wget -O cmsis.pack -q \${JENKINS_URL}/userContent/ARM.CMSIS.${CMSIS_VERSION}.pack
unzip -o -d CMSIS_5 cmsis.pack
"""
}
stage("Build") {
- sh "tf-m-ci-scripts/run-build.sh"
+ sh "tf-m-ci-scripts/run-build.sh 2>&1 | tee build.log"
}
stage("Post") {
- archiveArtifacts 'trusted-firmware-m/build/install/**'
+ archiveArtifacts 'trusted-firmware-m/build/install/**,build.log'
cleanWs()
}
}
diff --git a/jenkins/build-docs.jpl b/jenkins/build-docs.jpl
index 8c967fd..ba5e1e1 100644
--- a/jenkins/build-docs.jpl
+++ b/jenkins/build-docs.jpl
@@ -34,13 +34,15 @@
branches: [[name: 'FETCH_HEAD']],
userRemoteConfigs: [[
refspec: 'refs/tags/$MBEDCRYPTO_VERSION',
- url: 'https://github.com/ARMmbed/mbed-crypto.git'
+ url: params.MBEDCRYPTO_URL
]]
]
)
}
sh """
-wget -O cmsis.pack -q https://github.com/ARM-software/CMSIS_5/releases/download/${CMSIS_VERSION}/ARM.CMSIS.${CMSIS_VERSION}.pack
+# Host https://github.com/ARM-software/CMSIS_5/releases/download/5.5.0/ARM.CMSIS.5.5.0.pack
+# under \$JENKINS_HOME/userContent
+wget -O cmsis.pack -q \${JENKINS_URL}/userContent/ARM.CMSIS.${CMSIS_VERSION}.pack
unzip -o -d CMSIS_5 cmsis.pack
"""
}
diff --git a/jenkins/checkpatch.jpl b/jenkins/checkpatch.jpl
index f43ef58..5a447b5 100644
--- a/jenkins/checkpatch.jpl
+++ b/jenkins/checkpatch.jpl
@@ -51,13 +51,15 @@
branches: [[name: 'FETCH_HEAD']],
userRemoteConfigs: [[
refspec: 'refs/tags/$MBEDCRYPTO_VERSION',
- url: 'https://github.com/ARMmbed/mbed-crypto.git'
+ url: params.MBEDCRYPTO_URL
]]
]
)
}
sh """
-wget -O cmsis.pack -q https://github.com/ARM-software/CMSIS_5/releases/download/${CMSIS_VERSION}/ARM.CMSIS.${CMSIS_VERSION}.pack
+# Host https://github.com/ARM-software/CMSIS_5/releases/download/5.5.0/ARM.CMSIS.5.5.0.pack
+# under \$JENKINS_HOME/userContent
+wget -O cmsis.pack -q \${JENKINS_URL}/userContent/ARM.CMSIS.${CMSIS_VERSION}.pack
unzip -o -d CMSIS_5 cmsis.pack
"""
}
diff --git a/jenkins/ci.jpl b/jenkins/ci.jpl
index 9f52c62..743d6c5 100644
--- a/jenkins/ci.jpl
+++ b/jenkins/ci.jpl
@@ -6,6 +6,8 @@
//
//-------------------------------------------------------------------------------
+library identifier: 'local-lib@master', retriever: legacySCM(scm)
+
def listConfigs(ci_scripts_dir, config_list, filter_group) {
dir(ci_scripts_dir) {
echo "Obtaining list of configs."
@@ -39,11 +41,22 @@
params += string(name: 'CMSIS_VERSION', value: env.CMSIS_VERSION)
params += string(name: 'MBEDCRYPTO_VERSION', value: env.MBEDCRYPTO_VERSION)
params += string(name: 'CODE_REPO', value: env.CODE_REPO)
- return {
- def res = build(job: 'tf-m-build-config', parameters: params, propagate: false)
- print("${res.number}: ${config} ${res.result} ${res.getAbsoluteUrl()}")
- if (res.result == "FAILURE") {
- error("Build failed at ${res.getAbsoluteUrl()}")
+ return { -> results
+ def build_res = build(job: 'tf-m-build-config', parameters: params, propagate: false)
+ def build_info = [build_res, config, params_collection]
+ results['builds'][build_res.number] = build_info
+ def build_url = build_res.getAbsoluteUrl()
+ print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
+ failure_states = ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]
+ if (build_res.result in failure_states) {
+ error("Build failed at ${build_url}")
+ }
+ else {
+ print("Doing LAVA stuff for ${build_url}")
+ params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
+ params += string(name: 'BUILD_URL', value: build_url)
+ def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
+ results['lava_jobs'] += lava_res.getDescription()
}
}
}
@@ -58,12 +71,29 @@
return {
def res = build(job: 'tf-m-build-docs', parameters: params, propagate:false)
print("${res.number}: Docs ${res.result} ${res.getAbsoluteUrl()}")
- if (res.result == "FAILURE") {
+ if (res.result in ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]) {
error("Build failed at ${res.getAbsoluteUrl()}")
}
}
}
+
+def buildCsv(results) {
+ def csvContent = summary.getBuildCsv(results)
+ node("master") {
+ writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
+ archiveArtifacts 'build_results.csv'
+ }
+}
+
+def writeSummary(results) {
+ def buildLinks = summary.getLinks(results)
+ node("master") {
+ writeFile file: "build_links.html", text: buildLinks
+ archiveArtifacts 'build_links.html'
+ }
+}
+
def verifyStatus(value, stage_name) {
node("docker-amd64-xenial") {
cleanWs()
@@ -100,6 +130,8 @@
stage("Configs") {
// Populate configs
listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
+ results['builds'] = [:]
+ results['lava_jobs'] = []
for (config in configs) {
builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP)
}
@@ -111,10 +143,26 @@
try {
parallel(builds)
} catch (Exception e) {
+ print(e)
manager.buildFailure()
verify = -1
} finally {
+ print("Verifying status")
verifyStatus(verify, 'build')
+ print("Building CSV")
+ buildCsv(results['builds'])
+ writeSummary(results['builds'])
}
}
-// TODO Test phase
+node("docker-amd64-xenial") {
+ stage("Tests") {
+ dir("tf-m-ci-scripts") {
+ git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+ }
+ print("Wait for LAVA results here...")
+ results['lava_jobs'].each { result ->
+ print(result)
+ }
+ }
+ cleanWs()
+}
diff --git a/jenkins/cppcheck.jpl b/jenkins/cppcheck.jpl
index eacdc3a..6ce22b4 100644
--- a/jenkins/cppcheck.jpl
+++ b/jenkins/cppcheck.jpl
@@ -51,13 +51,15 @@
branches: [[name: 'FETCH_HEAD']],
userRemoteConfigs: [[
refspec: 'refs/tags/$MBEDCRYPTO_VERSION',
- url: 'https://github.com/ARMmbed/mbed-crypto.git'
+ url: params.MBEDCRYPTO_URL
]]
]
)
}
sh """
-wget -O cmsis.pack -q https://github.com/ARM-software/CMSIS_5/releases/download/${CMSIS_VERSION}/ARM.CMSIS.${CMSIS_VERSION}.pack
+# Host https://github.com/ARM-software/CMSIS_5/releases/download/5.5.0/ARM.CMSIS.5.5.0.pack
+# under \$JENKINS_HOME/userContent
+wget -O cmsis.pack -q \${JENKINS_URL}/userContent/ARM.CMSIS.${CMSIS_VERSION}.pack
unzip -o -d CMSIS_5 cmsis.pack
"""
}
diff --git a/jenkins/lava-submit.jpl b/jenkins/lava-submit.jpl
new file mode 100644
index 0000000..fbb8611
--- /dev/null
+++ b/jenkins/lava-submit.jpl
@@ -0,0 +1,43 @@
+#!/usr/bin/env groovy
+//-------------------------------------------------------------------------------
+// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-3-Clause
+//
+//-------------------------------------------------------------------------------
+
+node("docker-amd64-xenial") {
+ stage("Init") {
+ cleanWs()
+ dir("tf-m-ci-scripts") {
+ git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+ }
+ }
+ stage("LAVA") {
+ withCredentials([usernamePassword(credentialsId: 'LAVA_CREDENTIALS', passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
+ print("Generating LAVA jobs...")
+ def bl2_string = ""
+ if (env.BL2.equals("True")) {
+ bl2_string = "--bl2"
+ }
+ dir("tf-m-ci-scripts") {
+ def res = sh(script: """./lava_helper/lava_create_jobs.py \
+--build-number ${env.BUILD_NUMBER} --output-dir lava_jobs \
+--compiler ${env.COMPILER} --platform ${env.TARGET_PLATFORM} \
+${bl2_string} --build-type ${env.CMAKE_BUILD_TYPE} \
+--jenkins-build-url ${env.BUILD_URL} --proj-config ${env.PROJ_CONFIG}
+""", returnStdout: true).trim()
+ print(res)
+ job_ids = sh(script: """./lava_helper/lava_submit_jobs.py \
+ --lava-url https://tf.validation.linaro.org --job-dir lava_jobs \
+ --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN}
+ """, returnStdout: true).trim()
+ currentBuild.setDescription(job_ids)
+ }
+ }
+ }
+ stage("Post") {
+ archiveArtifacts artifacts: 'tf-m-ci-scripts/lava_jobs/**', allowEmptyArchive: true
+ cleanWs()
+ }
+}
diff --git a/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2 b/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2
new file mode 100644
index 0000000..aec5bd2
--- /dev/null
+++ b/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2
@@ -0,0 +1,77 @@
+{#------------------------------------------------------------------------------
+# Copyright (c) 2018-2019, Arm Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#-----------------------------------------------------------------------------#}
+{% extends 'jinja2_templates/base.jinja2' %}
+{% block metadata %}
+{{ super() }}
+{% endblock %}
+{% block base %}
+{{ super() }}
+{% endblock %}
+{% block actions %}
+context:
+ kernel_start_message: ''
+
+actions:
+- deploy:
+ namespace: docker
+ to: fvp
+ images:
+ ns:
+ url: {{ firmware_url }}
+ s:
+ url: {{ bootloader_url }}
+
+- boot:
+ namespace: docker
+ method: fvp
+ docker:
+ name: 'replace_docker_prefix/lava-fvp-mps2'
+ prompts:
+ - 'root@lava '
+ image: /opt/model/FVP_MPS2_AEMv8M
+ timeout:
+ minutes: 5
+ console_string: 'telnetterminal0: Listening for serial connection on port (?P<PORT>\d+)'
+ license_variable: 'replace_licence_variable'
+ arguments:
+ - "--application cpu0={S}"
+ - "--data cpu0={NS}@0x00100000"
+ - "--simlimit 1200"
+ - "--parameter fvp_mps2.platform_type=2"
+ - "--parameter cpu0.INITVTOR_S=0x10000000"
+ - "--parameter cpu0.semihosting-enable=0"
+ - "--parameter fvp_mps2.DISABLE_GATING=0"
+ - "--parameter fvp_mps2.telnetterminal0.start_telnet=1"
+ - "--parameter fvp_mps2.telnetterminal1.start_telnet=1"
+ - "--parameter fvp_mps2.telnetterminal2.start_telnet=1"
+ - "--parameter fvp_mps2.telnetterminal0.quiet=0"
+ - "--parameter fvp_mps2.telnetterminal1.quiet=0"
+ - "--parameter fvp_mps2.telnetterminal2.quiet=0"
+ - "--parameter fvp_mps2.UART0.unbuffered_output=1"
+ - "--parameter fvp_mps2.UART0.shutdown_on_eot=1"
+ - "--parameter fvp_mps2.UART1.unbuffered_output=1"
+ - "--parameter fvp_mps2.UART1.shutdown_on_eot=1"
+ - "--parameter fvp_mps2.UART2.unbuffered_output=1"
+ - "--parameter fvp_mps2.UART2.shutdown_on_eot=1"
+ - "--parameter fvp_mps2.mps2_visualisation.disable-visualisation=1"
+ - "--parameter cpu0.baseline=1"
+ prompts:
+ - 'Jumping to non-secure code'
+
+- test:
+ namespace: target
+ monitors:
+ {%- for monitor in test.monitors %}
+ - name: "{{monitor.name}}_{{ platform }}_{{ compiler }}_{{ name }}_{{ build_type }}_{{ boot_type }}"
+ start: "{{monitor.start}}"
+ end: "{{monitor.end}}"
+ pattern: "{{monitor.pattern}}"
+ fixupdict:
+ '{{monitor.fixup.pass}}': pass
+ '{{monitor.fixup.fail}}': fail
+ {% endfor %}
+{% endblock %}
diff --git a/lava_helper/lava_helper_configs.py b/lava_helper/lava_helper_configs.py
index b1ddd4e..888b0f9 100644
--- a/lava_helper/lava_helper_configs.py
+++ b/lava_helper/lava_helper_configs.py
@@ -172,8 +172,137 @@
} # Tests
}
+
+tfm_mps2_fvp = {
+ "templ": "template_tfm_mps2_fvp.jinja2",
+ "job_name": "mps-fvp",
+ "device_type": "fvp",
+ "job_timeout": 180,
+ "action_timeout": 90,
+ "monitor_timeout": 90,
+ "poweroff_timeout": 5,
+ "recovery_store_url": "%(jenkins_url)s/"
+ "job/%(jenkins_job)s",
+ "artifact_store_url": "%(jenkins_url)s/"
+ "job/%(jenkins_job)s",
+ "platforms": {"AN519": "mps2_an521_v3.0.tar.gz"},
+ "compilers": ["GNUARM"],
+ "build_types": ["Debug", "Release"],
+ "boot_types": ["BL2"],
+ "tests": {
+ 'Default': {
+ "binaries": {
+ "firmware": "tfm_s.axf",
+ "bootloader": "tfm_ns.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': '[Sec Thread]',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+ } # Monitors
+ ]
+ }, # Default
+ 'Regression': {
+ "binaries": {
+ "firmware": "tfm_s.axf",
+ "bootloader": "tfm_ns.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': 'Secure test suites summary',
+ 'end': 'End of Secure test suites',
+ 'pattern': r"[\x1b]\\[37mTest suite '(?P<"
+ r"test_case_id>[^\n]+)' has [\x1b]\\[32m "
+ r"(?P<result>PASSED|FAILED)",
+ 'fixup': {"pass": "PASSED", "fail": "FAILED"},
+ 'required': [
+ ("psa_protected_storage_"
+ "s_interface_tests_tfm_sst_test_2xxx_"),
+ "sst_reliability_tests_tfm_sst_test_3xxx_",
+ "sst_rollback_protection_tests_tfm_sst_test_4xxx_",
+ ("psa_internal_trusted_storage_"
+ "s_interface_tests_tfm_its_test_2xxx_"),
+ "its_reliability_tests_tfm_its_test_3xxx_",
+ ("audit_"
+ "logging_secure_interface_test_tfm_audit_test_1xxx_"),
+ "crypto_secure_interface_tests_tfm_crypto_test_5xxx_",
+ ("initial_attestation_service_"
+ "secure_interface_tests_tfm_attest_test_1xxx_"),
+ ]
+ },
+ {
+ 'name': 'Non_Secure_Test_Suites_Summary',
+ 'start': 'Non-secure test suites summary',
+ 'end': r'End of Non-secure test suites',
+ 'pattern': r"[\x1b]\\[37mTest suite '(?P"
+ r"<test_case_id>[^\n]+)' has [\x1b]\\[32m "
+ r"(?P<result>PASSED|FAILED)",
+ 'fixup': {"pass": "PASSED", "fail": "FAILED"},
+ 'required': [
+ ("psa_protected_storage"
+ "_ns_interface_tests_tfm_sst_test_1xxx_"),
+ ("psa_internal_trusted_storage"
+ "_ns_interface_tests_tfm_its_test_1xxx_"),
+ ("auditlog_"
+ "non_secure_interface_test_tfm_audit_test_1xxx_"),
+ ("crypto_"
+ "non_secure_interface_test_tfm_crypto_test_6xxx_"),
+ ("initial_attestation_service_"
+ "non_secure_interface_tests_tfm_attest_test_2xxx_"),
+ "core_non_secure_positive_tests_tfm_core_test_1xxx_"
+ ]
+ }
+ ] # Monitors
+ }, # Regression
+ 'CoreIPC': {
+ "binaries": {
+ "firmware": "tfm_s.axf",
+ "bootloader": "tfm_ns.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': '[Sec Thread]',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+ } # Monitors
+ ]
+ }, # CoreIPC
+ 'CoreIPCTfmLevel2': {
+ "binaries": {
+ "firmware": "tfm_s.axf",
+ "bootloader": "tfm_ns.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': '[Sec Thread]',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+ } # Monitors
+ ]
+ }, # CoreIPCTfmLevel2
+ } # Tests
+}
+
# All configurations should be mapped here
-lava_gen_config_map = {"tfm_mps2_sse_200": tfm_mps2_sse_200}
+lava_gen_config_map = {"tfm_mps2_sse_200": tfm_mps2_sse_200,
+ "tfm_mps2_fvp": tfm_mps2_fvp}
lavagen_config_sort_order = [
"templ",
"job_name",
diff --git a/lava_helper/lava_submit_jobs.py b/lava_helper/lava_submit_jobs.py
new file mode 100755
index 0000000..b52bed1
--- /dev/null
+++ b/lava_helper/lava_submit_jobs.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+"""
+Script for submitting multiple LAVA definitions
+"""
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+import os
+import glob
+import sys
+import shutil
+import argparse
+from copy import deepcopy
+from collections import OrderedDict
+from jinja2 import Environment, FileSystemLoader
+from lava_helper_configs import *
+
+try:
+ from tfm_ci_pylib.utils import (
+ save_json,
+ load_json,
+ sort_dict,
+ load_yaml,
+ test,
+ print_test,
+ )
+ from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
+except ImportError:
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+ from tfm_ci_pylib.utils import (
+ save_json,
+ load_json,
+ sort_dict,
+ load_yaml,
+ test,
+ print_test,
+ )
+ from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
+
+
+def test_lava_dispatch_credentials(user_args):
+ """ Will validate if provided token/credentials are valid. It will return
+ a valid connection or exit program if not"""
+
+ # Collect the authentication tokens
+ try:
+ if user_args.token_from_env:
+ usr = os.environ['LAVA_USER']
+ secret = os.environ['LAVA_TOKEN']
+ elif user_args.lava_token and user_args.lava_user:
+ usr = user_args.lava_user
+ secret = user_args.lava_token
+
+ # Do not submit job without complete credentials
+ if not len(usr) or not len(secret):
+ raise Exception("Credentials not set")
+
+ lava = LAVA_RPC_connector(usr,
+ secret,
+ user_args.lava_url)
+
+        # Test the credentials against the backend
+ if not lava.test_credentials():
+ raise Exception("Server rejected user authentication")
+ except Exception as e:
+ print("Credential validation failed with : %s" % e)
+        print("Did you set --lava-token, --lava-user?")
+ sys.exit(1)
+ return lava
+
+def list_files_from_dir(user_args):
+ file_list = []
+ for filename in glob.iglob(user_args.job_dir + '**/*.yaml', recursive=True):
+ file_list.append(filename)
+ print("Found job {}".format(filename))
+ return file_list
+
+def lava_dispatch(user_args):
+    """ Submit a job to LAVA backend, block until it is completed, and
+ fetch the results files if successful. If not, calls sys exit with 1
+ return code """
+
+ lava = test_lava_dispatch_credentials(user_args)
+ file_list = list_files_from_dir(user_args)
+ job_id_list = []
+ for job_file in file_list:
+ job_id, job_url = lava.submit_job(job_file)
+
+ # The reason of failure will be reported to user by LAVA_RPC_connector
+ if job_id is None and job_url is None:
+ print("Job failed")
+ else:
+ print("Job submitted at: " + job_url)
+ job_id_list.append(job_id)
+
+ print("\n".join(str(x) for x in job_id_list))
+
+def main(user_args):
+ lava_dispatch(user_args)
+
+
+def get_cmd_args():
+ """ Parse command line arguments """
+
+ # Parse command line arguments to override config
+ parser = argparse.ArgumentParser(description="Lava Create Jobs")
+ cmdargs = parser.add_argument_group("Create LAVA Jobs")
+
+ # Configuration control
+ cmdargs.add_argument(
+ "--lava-url", dest="lava_url", action="store", help="LAVA lab URL (without RPC2)"
+ )
+ cmdargs.add_argument(
+ "--job-dir", dest="job_dir", action="store", help="LAVA jobs directory"
+ )
+ cmdargs.add_argument(
+ "--lava-token", dest="lava_token", action="store", help="LAVA auth token"
+ )
+ cmdargs.add_argument(
+ "--lava-user", dest="lava_user", action="store", help="LAVA username"
+ )
+ cmdargs.add_argument(
+        "--use-env", dest="token_from_env", action="store_true", default=False, help="Read LAVA credentials from LAVA_USER/LAVA_TOKEN environment variables"
+ )
+ cmdargs.add_argument(
+        "--lava-timeout", dest="dispatch_timeout", action="store", default=3600, help="Time in seconds to wait for LAVA job dispatch"
+ )
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ main(get_cmd_args())
diff --git a/tfm_ci_pylib/lava_rpc_connector.py b/tfm_ci_pylib/lava_rpc_connector.py
index 4006c45..e7571db 100644
--- a/tfm_ci_pylib/lava_rpc_connector.py
+++ b/tfm_ci_pylib/lava_rpc_connector.py
@@ -112,10 +112,13 @@
job_definition)
print(e)
return None, None
-
- job_id = self.scheduler.submit_job(job_data)
- job_url = self.server_job_prefix % job_id
- return(job_id, job_url)
+ try:
+ job_id = self.scheduler.submit_job(job_data)
+ job_url = self.server_job_prefix % job_id
+ return(job_id, job_url)
+ except Exception as e:
+ print(e)
+ return(None, None)
def resubmit_job(self, job_id):
""" Re-submit job with provided id. Returns resulting job id,
diff --git a/tfm_ci_pylib/tfm_build_manager.py b/tfm_ci_pylib/tfm_build_manager.py
index 56bf469..d4b4545 100644
--- a/tfm_ci_pylib/tfm_build_manager.py
+++ b/tfm_ci_pylib/tfm_build_manager.py
@@ -75,17 +75,17 @@
super(TFM_Build_Manager, self).__init__(name="TFM_Build_Manager")
- def print_config(self):
- """Prints a list of available build configurations"""
- print("\n".join(list(self._tbm_build_cfg.keys())))
+ def get_config(self):
+ return list(self._tbm_build_cfg.keys())
- def print_config_environment(self, config):
+ def print_config_environment(self, config, silence_stderr=False):
"""
For a given build configuration from output of print_config
method, print environment variables to build.
"""
if config not in self._tbm_build_cfg:
- print("Error: no such config {}".format(config), file=sys.stderr)
+ if not silence_stderr:
+ print("Error: no such config {}".format(config), file=sys.stderr)
sys.exit(1)
config_details = self._tbm_build_cfg[config]
argument_list = [
diff --git a/vars/summary.groovy b/vars/summary.groovy
new file mode 100644
index 0000000..31a23fe
--- /dev/null
+++ b/vars/summary.groovy
@@ -0,0 +1,67 @@
+#!/usr/bin/env groovy
+//-------------------------------------------------------------------------------
+// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-3-Clause
+//
+//-------------------------------------------------------------------------------
+
+@NonCPS
+def getBuildCsv(results) {
+ def table = [:]
+ def projects = []
+ results.each { result ->
+ res = result.value[0]
+ config = result.value[1]
+ params = result.value[2]
+ if (params['BL2'] == 'True') {
+ bl2_string = 'BL2'
+ } else {
+ bl2_string = 'NOBL2'
+ }
+ row_string = "${params['TARGET_PLATFORM']}_${params['COMPILER']}_${params['CMAKE_BUILD_TYPE']}_${bl2_string}"
+ column_string = "${params['PROJ_CONFIG']}"
+ row = table[row_string]
+ if (row == null) {
+ row = [:]
+ }
+ row[column_string] = res.getResult()
+ table[row_string] = row
+ if(!projects.contains(params['PROJ_CONFIG'])) {
+ projects += params['PROJ_CONFIG']
+ }
+ }
+ header = []
+ header += "" // top left
+ header.addAll(projects)
+ header.sort { it.toLowerCase() }
+ csvContent = []
+ for (row in table) {
+ row_item = []
+ row_item += row.key
+ for (project in projects) {
+ result = table[row.key][project]
+ if (result == null) {
+ result = "N/A"
+ }
+ row_item += result
+ }
+ csvContent.add(row_item)
+ }
+ csvContent.sort { it[0].toLowerCase() }
+ csvContent.add(0, header)
+ return csvContent
+}
+
+@NonCPS
+def getLinks(results) {
+ linksContent = []
+ results.each { result ->
+ res = result.value[0]
+ config = result.value[1]
+ url = res.getAbsoluteUrl()
+ linksContent.add("${config}: <a href=\"${url}\">Job</a>/<a href=\"${url}/artifact/build.log/*view*/\">Logs</a>/<a href=\"${url}/artifact/\">Artifacts</a><br/>")
+ }
+ linksContent.sort()
+ return linksContent.join("\n")
+}