Open CI Scripts: Feature Update
* build_helper: Added --install argument to execute cmake install
* build_helper: Added the capability to parse axf files for
code/data/bss sizes and capture them in the report
* build_helper: Added --relative-paths to calculate paths relative
to the root of the workspace
* build_helper_configs: Full restructure of the config modules.
Extra build commands and expected artefacts can now be defined on a
per-platform basis
* Checkpatch: Added the --ignore SPDX_LICENSE_TAG directive
and the capability to run only on files changed in the patch.
* CppCheck: Adjusted suppression directories for the new external
libraries and the code-base restructure
* Added the fastmodel dispatcher. It wraps around fastmodels
and tests against a dynamically defined test_map. Fed with the
build summary as input, the fastmodel dispatcher detects the
builds which have tests in the map and runs them (a sketch of
the flow follows this list).
* Added Fastmodel configs for AN519 and AN521 platforms
* lava_helper: Added the --override-jenkins-job/
--override-jenkins-url arguments.
* Adjusted the Jinja2 template to include the build number and
enable the overrides.
* Adjusted the lava helper configs to support dual-platform firmware
and added the CoreIPC config
* Added a report parser module to create/read/evaluate and
modify reports (see the report-handling sketch after this list).
The bash scripts for the cppcheck/checkpatch summaries have been
removed.
* Adjusted run_cppcheck/run_checkpatch for new project libraries,
new codebase structure and other tweaks.
* Restructured the build manager, decoupling it from the tf-m
cmake requirements. The build manager can now dynamically build a
configuration from a combination of parameters, or it can simply
execute an array of build commands. Hardcoded tf-m assumptions have
been removed and moved into the configuration space (see the
configuration sketch after this list).
* The build system can now produce MUSCA_A/MUSCA_B1 binaries as well
as Intel HEX files.
* Updated the utilities snippet collection in the tfm-ci-pylib.
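
The dispatcher flow mentioned above can be pictured with the sketch
below. The names dispatch_fastmodel_tests and run_test, and the exact
summary layout, are illustrative assumptions rather than the actual
module API:

    import json

    def dispatch_fastmodel_tests(build_summary_path, test_map, run_test):
        """Dispatch a fastmodel test for every build in the summary that
        has an entry in the dynamically defined test_map.

        run_test is a callable(test_config, build_info) -> result dict;
        in the real dispatcher it wraps the fastmodel and monitors its
        output.
        """
        with open(build_summary_path, "r") as f:
            summary = json.load(f)

        results = {}
        for config_name, build in summary.get("report", {}).items():
            if not build.get("success"):
                continue  # only dispatch tests for builds that succeeded
            if config_name not in test_map:
                continue  # no fastmodel test defined for this configuration
            results[config_name] = run_test(test_map[config_name], build)
        return results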
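
A minimal sketch of the report handling mentioned above, assuming the
JSON report layout produced by lava_helper ({"report": {...},
"_metadata_": {...}}); the helper names are illustrative only:

    import json

    def save_report(path, report):
        """Write a report dictionary out as JSON."""
        with open(path, "w") as f:
            json.dump(report, f, indent=2)

    def evaluate_report(report):
        """Return True only when every test entry in the report succeeded."""
        return all(entry.get("success")
                   for entry in report.get("report", {}).values())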
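
The decoupled configuration space can be thought of along the lines of
the hypothetical entry below; the field names are assumptions for
illustration and not the exact build_helper_configs schema:

    example_build_config = {
        # Parameter matrix the build manager expands into build commands
        "platform": ["AN521", "MUSCA_B1"],
        "compiler": ["GNUARM"],
        "build_type": ["Debug", "Release"],
        "boot_type": ["BL2"],
        # Extra build commands and expected artefacts per platform
        "platform_overrides": {
            "MUSCA_B1": {
                "extra_build_commands": ["cmake --build ./ -- install"],
                "expected_artefacts": ["install/outputs/MUSCA_B1/tfm.hex"],
            },
        },
    }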
Change-Id: Ifad7676e1cd47e3418e851b56dbb71963d85cd88
Signed-off-by: Minos Galanakis <minos.galanakis@linaro.org>
diff --git a/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2 b/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
index b397065..b80884c 100644
--- a/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
+++ b/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
@@ -14,7 +14,7 @@
minutes: {{ action_timeout }}
actions:
power-off:
- seconds: 30
+ seconds: {{ poweroff_timeout }}
connections:
lava-test-monitor:
minutes: {{ monitor_timeout }}
@@ -23,30 +23,30 @@
visibility: public
actions:
-{%- for platform, recovery in platforms.items() %}
+{%- for platform, recovery in platforms.items()|sort(reverse=false) %}
- deploy:
to: mps
images:
recovery_image:
- url: {{ recovery_store_url }}/lastSuccessfulBuild/artifact/{{ recovery }}
+ url: {{ recovery_store_url }}/{{ build_no }}/artifact/{{ recovery }}
compression: gz
- namespace: target
- {%- for compiler in compilers %}
- {%- for build_type in build_types %}
- {%- for boot_type in boot_types %}
- {% for name, test in tests.items() %}
+ namespace: target_{{ recovery | replace(".tar.gz", "") }}
+{% for compiler in compilers|sort(reverse=true) %}
+ {%- for build_type in build_types|sort(reverse=false) %}
+ {%- for boot_type in boot_types|sort(reverse=false) %}
+ {%- for name, test in tests.items()|sort(reverse=false) %}
- deploy:
to: mps
images:
test_binary:
- url: {{artifact_store_url}}/{{ build_no}}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/{{ test.binaries.firmware }}
+ url: {{artifact_store_url}}/{{ build_no }}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/install/outputs/{{ platform }}/{{ test.binaries.firmware }}
namespace: target
- deploy:
to: mps
images:
test_binary:
- url: {{artifact_store_url}}/{{ build_no}}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/{{ test.binaries.bootloader }}
+ url: {{artifact_store_url}}/{{ build_no }}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/install/outputs/{{ platform }}/{{ test.binaries.bootloader }}
namespace: target
- boot:
@@ -66,8 +66,9 @@
fixupdict:
'{{monitor.fixup.pass}}': pass
'{{monitor.fixup.fail}}': fail
+
{%- endfor %}
- {% endfor %}
+ {%- endfor %}
{%- endfor %}
{%- endfor %}
{%- endfor %}
diff --git a/lava_helper/lava_helper.py b/lava_helper/lava_helper.py
index 4e8ed88..783ed04 100755
--- a/lava_helper/lava_helper.py
+++ b/lava_helper/lava_helper.py
@@ -19,7 +19,7 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import os
import sys
@@ -31,13 +31,13 @@
try:
from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
- load_yaml, test
+ load_yaml, test, print_test
from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
except ImportError:
dir_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(dir_path, "../"))
from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
- load_yaml, test
+ load_yaml, test, print_test
from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
@@ -165,26 +165,60 @@
# Call the formatter
list(map(format_results, test_results))
+ # Remove the ignored configs if requested
+ if user_args.ignore_configs:
+ print(user_args.ignore_configs)
+ for cfg in user_args.ignore_configs:
+ try:
+ print("Rejecting config: ", cfg)
+ t_dict.pop(cfg)
+ except KeyError as e:
+ print("Warning! Rejected config %s not found"
+ " in LAVA results" % cfg)
+
# We need to check that each of the tests contained in the test_map exist
# AND that they have a passed status
t_sum = 0
+
+ with open("lava_job.url", "r") as F:
+ job_url = F.read().strip()
+
+ out_rep = {"report": {},
+ "_metadata_": {"job_url": job_url}}
for k, v in t_dict.items():
try:
- t_sum += int(test(test_map[k],
- v,
- pass_text=["pass"],
- error_on_failed=False,
- test_name=k,
- summary=user_args.lava_summary)["success"])
+ out_rep["report"][k] = test(test_map[k],
+ v,
+ pass_text=["pass"],
+ error_on_failed=False,
+ test_name=k,
+ summary=user_args.lava_summary)
+ t_sum += int(out_rep["report"][k]["success"])
# Status can be None if a test didn't fully run/complete
except TypeError as E:
t_sum = 1
+ print("\n")
+ sl = [x["name"] for x in out_rep["report"].values()
+ if x["success"] is True]
+ fl = [x["name"] for x in out_rep["report"].values()
+ if x["success"] is False]
+
+ if sl:
+ print_test(t_list=sl, status="passed", tname="Tests")
+ if fl:
+ print_test(t_list=fl, status="failed", tname="Tests")
+
+ # Generate the output report if requested
+ if user_args.output_report:
+ save_json(user_args.output_report, out_rep)
# Every single of the tests need to have passed for group to succeed
if t_sum != len(t_dict):
print("Group Testing FAILED!")
- sys.exit(1)
- print("Group Testing PASS!")
+ if user_args.eif:
+ sys.exit(1)
+ else:
+ print("Group Testing PASS!")
def test_lava_dispatch_credentials(user_args):
@@ -226,10 +260,19 @@
lava = test_lava_dispatch_credentials(user_args)
job_id, job_url = lava.submit_job(user_args.dispatch)
- print("Job submitted at: " + job_url)
+
+ # The reason for the failure will be reported to the user by LAVA_RPC_connector
+ if job_id is None and job_url is None:
+ sys.exit(1)
+ else:
+ print("Job submitted at: " + job_url)
+
with open("lava_job.id", "w") as F:
F.write(str(job_id))
print("Job id %s stored at lava_job.id file." % job_id)
+ with open("lava_job.url", "w") as F:
+ F.write(str(job_url))
+ print("Job url %s stored at lava_job.url file." % job_id)
# Wait for the job to complete
status = lava.block_wait_for_job(job_id, int(user_args.dispatch_timeout))
@@ -270,6 +313,18 @@
config["build_no"] = user_args.build_no
+ # Override with command line provided URL/Job Name
+ if user_args.jenkins_url:
+ _over_d = {"jenkins_url": user_args.jenkins_url,
+ "jenkins_job": "%(jenkins_job)s"}
+ config["recovery_store_url"] = config["recovery_store_url"] % _over_d
+ config["artifact_store_url"] = config["artifact_store_url"] % _over_d
+
+ if user_args.jenkins_job:
+ _over_d = {"jenkins_job": user_args.jenkins_job}
+ config["recovery_store_url"] = config["recovery_store_url"] % _over_d
+ config["artifact_store_url"] = config["artifact_store_url"] % _over_d
+
# Add the template folder
config["templ"] = os.path.join(user_args.template_dir, config["templ"])
return config
@@ -289,6 +344,8 @@
save_config(config_file, lava_gen_config_map[config_key])
print("Configuration exported at %s" % config_file)
return
+ if user_args.dispatch is not None or user_args.dispatch_cancel is not None:
+ pass
else:
config = load_config_overrides(user_args)
@@ -372,7 +429,18 @@
dest="platform",
action="store",
help="Override platform.Only the provided one "
- "will be tested ")
+ "will be tested")
+ over_g.add_argument("-ou", "--override-jenkins-url",
+ dest="jenkins_url",
+ action="store",
+ help="Override %(jenkins_url)s params in config if "
+ "present. Sets the jenkings address including "
+ "port")
+ over_g.add_argument("-oj", "--override-jenkins-job",
+ dest="jenkins_job",
+ action="store",
+ help="Override %(jenkins_job)s params in config if "
+ "present. Sets the jenkings job name")
parse_g.add_argument("-tp", "--task-lava-parse",
dest="lava_results",
action="store",
@@ -381,9 +449,24 @@
" of testing")
parse_g.add_argument("-ls", "--lava-parse-summary",
dest="lava_summary",
- default=True,
+ default=False,
action="store_true",
help="Print full test summary")
+ parse_g.add_argument("-or", "--output-report",
+ dest="output_report",
+ action="store",
+ help="Print full test summary")
+ parser.add_argument("-ef", "--error-if-failed",
+ dest="eif",
+ action="store_true",
+ help="If set will change the script exit code if one "
+ "or more tests fail")
+ parser.add_argument('-ic', '--ignore-configs',
+ dest="ignore_configs",
+ nargs='+',
+ help="Pass a space separated list of build"
+ "configurations which will get ignored when"
+ "evaluation LAVA results")
# Lava job control commands
disp_g.add_argument("-td", "--task-dispatch",
diff --git a/lava_helper/lava_helper_configs.py b/lava_helper/lava_helper_configs.py
index 100b0ed..a5cf8c1 100644
--- a/lava_helper/lava_helper_configs.py
+++ b/lava_helper/lava_helper_configs.py
@@ -18,7 +18,7 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
def lava_gen_get_config_subset(config,
@@ -35,6 +35,9 @@
# Remove all configs not requested by the caller
if not default:
tests.pop("Default")
+ if not core:
+ tests.pop("CoreIPC")
+ tests.pop("CoreIPCTfmLevel2")
if not regression:
tests.pop("Regression")
@@ -45,26 +48,24 @@
tfm_mps2_sse_200 = {
"templ": "template_tfm_mps2_sse_200.jinja2",
"job_name": "mps2plus-arm-tfm",
- "device_type": "mps",
- "job_timeout": 60,
- "action_timeout": 60,
- "monitor_timeout": 60,
- "recovery_store_url": "https://ci.trustedfirmware.org/"
- "job/tf-m-fpga-image-store",
- "artifact_store_url": "https://ci.trustedfirmware.org/"
- "job/tf-m-build-test-review",
- "platforms": {"AN521": "mps2_sse200_an512.tar.gz"},
+ "device_type": "mps2plus",
+ "job_timeout": 120,
+ "action_timeout": 90,
+ "monitor_timeout": 90,
+ "poweroff_timeout": 10,
+ "recovery_store_url": "%(jenkins_url)s/"
+ "job/%(jenkins_job)s",
+ "artifact_store_url": "%(jenkins_url)s/"
+ "job/%(jenkins_job)s",
+ "platforms": {"AN521": "mps2_an521_v3.0.tar.gz"},
"compilers": ["GNUARM"],
- "build_types": ["Debug"],
+ "build_types": ["Debug", "Release"],
"boot_types": ["BL2"],
"tests": {
'Default': {
- "recovery": "mps2_sse200_an512.tar.gz",
"binaries": {
- "firmware":
- "install/outputs/AN521/tfm_sign.bin",
- "bootloader":
- "install/outputs/AN521/mcuboot.bin"
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
},
"monitors": [
{
@@ -80,10 +81,9 @@
]
}, # Default
'Regression': {
- "recovery": "mps2_sse200_an512.tar.gz",
"binaries": {
- "firmware": "install/outputs/AN521/tfm_sign.bin",
- "bootloader": "install/outputs/AN521/mcuboot.bin"
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
},
"monitors": [
{
@@ -131,6 +131,42 @@
}
] # Monitors
}, # Regression
+ 'CoreIPC': {
+ "binaries": {
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': 'Jumping to the first image slot',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+ } # Monitors
+ ]
+ }, # CoreIPC
+ 'CoreIPCTfmLevel2': {
+ "binaries": {
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': 'Jumping to the first image slot',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+ } # Monitors
+ ]
+ }, # CoreIPCTfmLevel2
} # Tests
}