Open CI Scripts: Feature Update
* build_helper: Added --install argument to execute cmake install
* build_helper: Added the capability to parse axf files for
code/data/bss sizes and capture them in the report
* build_helper: Added --relative-paths to calculate paths relative
to the root of the workspace (see the sketch after this list)
* build_helper_configs: Full restructure of the config modules.
Extra build commands and expected artefacts can be defined on a
per-platform basis
* Checkpatch: Added the --ignore SPDX_LICENSE_TAG directive
and added the capability to run only on files changed in a patch.
* CppCheck: Adjusted suppression directories for new external
libraries and the code-base restructure
* Added fastmodel dispatcher. It wraps the fastmodels and tests
against a dynamically defined test_map. Fed with a build summary
as input, the dispatcher detects builds which have tests in the
map and runs them.
* Added Fastmodel configs for AN519 and AN521 platforms
* lava_helper: Added arguments for --override-jenkins-job/
--override-jenkins-url
* Adjusted the Jinja2 template to include the build number and
to enable the overrides.
* Adjusted the lava helper configs to support dual-platform
firmware and added a CoreIPC config
* Added report parser module to create/read/evaluate and
modify reports. The bash scripts for cppcheck/checkpatch summaries
have been removed.
* Adjusted run_cppcheck/run_checkpatch for new project libraries,
new codebase structure and other tweaks.
* Restructured the build manager, decoupling it from the tf-m
cmake requirements. The build manager can now dynamically build a
configuration from a combination of parameters, or just execute
an array of build commands. Hardcoded tf-m assumptions have been
removed and moved into the configuration space.
* The build system can now produce MUSCA_A/MUSCA_B1 binaries as
well as Intel HEX files.
* Updated the utilities snippet collection in the tfm-ci-pylib.
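
The sketch below is illustrative only (not part of this change); it shows
how the new build() parameters fit together, assuming it is run from the
build_helper directory. All values are placeholders:

    from build_helper import build
    from build_helper_configs import _builtin_configs

    status, report = build("tf-m",              # TF-M checkout directory
                           "./builds",          # artifact output directory
                           "report.json",       # JSON build report path
                           _builtin_configs["an521"],
                           parallel_builds=3,   # concurrent build jobs
                           build_threads=3,     # threads per build job
                           build_install=True,  # run cmake install
                           image_sizes=True,    # record axf code/data/bss sizes
                           relative_paths=True) # workspace-relative paths
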
Change-Id: Ifad7676e1cd47e3418e851b56dbb71963d85cd88
Signed-off-by: Minos Galanakis <minos.galanakis@linaro.org>
diff --git a/build_helper/build_helper.py b/build_helper/build_helper.py
index ea8e8f3..58957d7 100755
--- a/build_helper/build_helper.py
+++ b/build_helper/build_helper.py
@@ -20,17 +20,17 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import os
import sys
import time
import argparse
import datetime
-from build_helper_configs import config_AN521
+from build_helper_configs import _builtin_configs
try:
- from tfm_ci_pylib.utils import get_cmd_args, load_json
+ from tfm_ci_pylib.utils import get_cmd_args
from tfm_ci_pylib.tfm_build_manager import TFM_Build_Manager
except ImportError:
dir_path = os.path.dirname(os.path.realpath(__file__))
@@ -39,7 +39,15 @@
from tfm_ci_pylib.tfm_build_manager import TFM_Build_Manager
-def build(tfm_dir, build_dir, buid_report_f, build_config):
+def build(tfm_dir,
+ build_dir,
+ buid_report_f,
+ build_config,
+ parallel_builds=3,
+ build_threads=3,
+ build_install=True,
+ image_sizes=False,
+ relative_paths=False):
""" Instantiate a build manager class and build all configurations """
start_time = time.time()
@@ -48,7 +56,11 @@
work_dir=build_dir,
cfg_dict=build_config,
report=buid_report_f,
- install=True)
+ parallel_builds=parallel_builds,
+ build_threads=build_threads,
+ install=build_install,
+ img_sizes=image_sizes,
+ relative_paths=relative_paths)
bm.start()
bm.join()
build_report = bm.get_report()
@@ -68,13 +80,27 @@
print("Failed to load config %s. Exception: %s" % (build_config,
e.msg))
sys.exit(1)
+ elif user_args.config:
+        if user_args.config.lower() in _builtin_configs.keys():
+            build_config = _builtin_configs[user_args.config.lower()]
+ else:
+ print("Configuration %s is not defined in built-in configs" %
+ user_args.config)
+ sys.exit(1)
else:
- build_config = config_AN521
+        print("Error: Configuration not specified")
+ sys.exit(1)
+
# Build everything
build_status, build_report = build(user_args.tfm_dir,
user_args.build_dir,
user_args.report,
- build_config)
+ build_config,
+ user_args.parallel_builds,
+ user_args.thread_no,
+ user_args.install,
+ user_args.image_sizes,
+ user_args.relative_paths)
if not build_report:
print("Build Report Empty, check build status")
@@ -82,15 +108,16 @@
if build_status:
print("Build Failed")
- sys.exit(1)
+ if user_args.eif:
+ sys.exit(1)
# pprint(build_report)
- print("Build Complete!")
+ print("Build Helper Quitting!")
sys.exit(0)
if __name__ == "__main__":
- # Calcuate the workspace root directory relative to the script location
+ # Calculate the workspace root directory relative to the script location
# Equivalent to realpath $(dirname ./build_helper/build_helper.py)/../../
root_path = os.path.dirname(os.path.realpath(__file__))
for i in range(2):
@@ -102,7 +129,17 @@
action="store",
default="./builds",
help="Where to generate the artifacts")
- parser.add_argument("-c", "--config_file",
+ parser.add_argument("-c", "--config",
+ dest="config",
+ action="store",
+                        help="Which of the built-in configs to run "
+                        "(%s)" % "/ ".join(_builtin_configs.keys()))
+ parser.add_argument("-e", "--error_if_failed",
+ dest="eif",
+ action="store_true",
+ help="If set will change the script exit code if one "
+ "or more builds fail")
+ parser.add_argument("-f", "--config_file",
dest="config_f",
action="store",
help="Manual configuration override file (JSON)")
@@ -110,10 +147,36 @@
dest="report",
action="store",
help="JSON file containing build report")
+ parser.add_argument("-i", "--install",
+ dest="install",
+ action="store_true",
+ help="Run make install after building config")
parser.add_argument("-t", "--tfm_dir",
dest="tfm_dir",
action="store",
default=os.path.join(root_path, "tf-m"),
help="TFM directory")
-
+ parser.add_argument("-s", "--image-sizes",
+ dest="image_sizes",
+ action="store_true",
+                        help="Run arm-none-eabi-size on axf files "
+                        "generated by the build")
+ parser.add_argument("-l", "--relative-paths",
+ dest="relative_paths",
+ action="store_true",
+                        help="When set, paths stored in the report will be "
+                        "relative to the execution directory. "
+                        "Recommended for Jenkins builds.")
+ parser.add_argument("-p", "--parallel-builds",
+ type=int,
+ dest="parallel_builds",
+ action="store",
+ default=3,
+                        help="Number of build jobs to run in parallel.")
+ parser.add_argument("-n", "--number-of-threads",
+ type=int,
+ dest="thread_no",
+ action="store",
+ default=3,
+ help="Number of threads to use per build job.")
main(get_cmd_args(parser=parser))
diff --git a/build_helper/build_helper_configs.py b/build_helper/build_helper_configs.py
index 39436c8..e48abeb 100644
--- a/build_helper/build_helper_configs.py
+++ b/build_helper/build_helper_configs.py
@@ -18,37 +18,255 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
+# Common parameters for the tf-m build system.
+# This configuration template is passed into the tfm-builder module;
+# after evaluation the template is converted to a build command.
+
+_common_tfm_builder_cfg = {
+ "config_type": "tf-m",
+ "codebase_root_dir": "tf-m",
+    # Order in which the variants are evaluated. This affects the name of
+    # the variant configuration and the wildcard replacement logic in
+    # invalid configuration tuples
+ "sort_order": ["target_platform",
+ "compiler",
+ "proj_config",
+ "cmake_build_type",
+ "with_mcuboot"],
+
+    # Keys for the template come from the combinations of parameters
+    # provided in the seed dictionary.
+
+ "config_template": (
+ "cmake -G \"Unix Makefiles\" "
+ "-DPROJ_CONFIG=`"
+ "readlink -f %(codebase_root_dir)s/configs/%(proj_config)s.cmake` "
+ "-DTARGET_PLATFORM=%(target_platform)s "
+ "-DCOMPILER=%(compiler)s "
+ "-DCMAKE_BUILD_TYPE=%(cmake_build_type)s "
+ "-DBL2=%(with_mcuboot)s "
+ "%(codebase_root_dir)s"),
+
+    # A small subset of string substitution params is allowed in commands.
+    # tfm_build_manager will replace %(_tbm_build_dir_)s, %(_tbm_code_dir_)s,
+    # and %(_tbm_target_platform_)s with the paths set when building
+
+ "artifact_capture_rex": (r'%(_tbm_build_dir_)s/install/outputs/'
+ r'(?:fvp|AN521|AN519|MUSCA_A|MUSCA_B1)'
+ r'/(\w+\.(?:axf|bin|hex))$'),
+
+    # The "all" commands will be executed for every build.
+    # Other keys append extra commands when they match the target_platform
+ "build_cmds": {"all": ["cmake --build ./ -- -j 2 install"],
+ "MUSCA_A": [("srec_cat "
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/mcuboot.bin "
+ "-Binary -offset 0x200000 "
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm_sign.bin "
+ "-Binary -offset 0x220000 -o "
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s"
+ "/tfm.hex -Intel")],
+ "MUSCA_B1": [("srec_cat "
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/mcuboot.bin "
+ "-Binary -offset 0x200000 "
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm_sign.bin "
+ "-Binary -offset 0x220000 -o "
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s"
+ "/tfm.hex -Intel")]
+ },
+
+    # (Optional) If set, the build will fail if these artefacts are missing
+    # after the build completes
+ "required_artefacts": {"all": [
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm_s.bin",
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm_ns.bin"],
+ "MUSCA_A": [
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm.hex",
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/mcuboot.bin",
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm_sign.bin"],
+ "MUSCA_B1": [
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm.hex",
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/mcuboot.bin",
+ "%(_tbm_build_dir_)s/install/outputs/"
+ "%(_tbm_target_platform_)s/tfm_sign.bin"]
+ }
+}
# Configure build manager to build several combinations
-config_AN521 = {"platform": ["AN521"],
+config_AN521 = {"seed_params": {
+ "target_platform": ["AN521"],
"compiler": ["GNUARM"],
- "config": ["ConfigRegression",
- "ConfigDefault"],
- "build": ["Debug"],
- "with_mcuboot": [True],
+ "proj_config": ["ConfigRegression",
+ "ConfigCoreIPC",
+ "ConfigCoreIPCTfmLevel2",
+ "ConfigDefault"],
+ "cmake_build_type": ["Debug", "Release"],
+ "with_mcuboot": [True, False],
+ },
+ "common_params": _common_tfm_builder_cfg,
# invalid configuations can be added as tuples of adjustable
# resolution "AN521" will reject all combinations for that
# platform while ("AN521", "GNUARM") will only reject GCC ones
"invalid": []
}
-_builtin_configs = {"AN521_gnuarm_Config_DRC": config_AN521}
+
+# Configure build manager to build several combinations
+config_AN519 = {"seed_params": {
+ "target_platform": ["AN519"],
+ "compiler": ["GNUARM"],
+ "proj_config": ["ConfigRegression",
+ "ConfigCoreIPC",
+ "ConfigCoreIPCTfmLevel2",
+ "ConfigDefault"],
+ "cmake_build_type": ["Debug", "Release"],
+ "with_mcuboot": [True, False],
+ },
+ "common_params": _common_tfm_builder_cfg,
+                # invalid configurations can be added as tuples of adjustable
+                # resolution: "AN521" will reject all combinations for that
+                # platform while ("AN521", "GNUARM") will only reject GCC ones
+ "invalid": []
+ }
+
+config_IPC = {"seed_params": {
+ "target_platform": ["AN521", "AN519", "MUSCA_A", "MUSCA_B1"],
+ "compiler": ["ARMCLANG", "GNUARM"],
+ "proj_config": ["ConfigCoreIPC",
+ "ConfigCoreIPCTfmLevel2"],
+ "cmake_build_type": ["Debug", "Release"],
+ "with_mcuboot": [True, False],
+ },
+ "common_params": _common_tfm_builder_cfg,
+              # invalid configurations can be added as tuples of adjustable
+              # resolution: "AN521" will reject all combinations for that
+              # platform while ("AN521", "GNUARM") will only reject GCC ones
+ "invalid": [("MUSCA_B1", "*", "*", "*", False)]
+ }
+
+# Configure build manager to build the maximum number of configurations
+config_full = {"seed_params": {
+ "target_platform": ["AN521", "AN519", "MUSCA_A", "MUSCA_B1"],
+ "compiler": ["ARMCLANG", "GNUARM"],
+ "proj_config": ["ConfigRegression",
+ "ConfigCoreIPC",
+ "ConfigCoreIPCTfmLevel2",
+ "ConfigDefault"],
+ "cmake_build_type": ["Debug", "Release"],
+ "with_mcuboot": [True, False],
+ },
+ "common_params": _common_tfm_builder_cfg,
+               # invalid configurations can be added as tuples of adjustable
+               # resolution: "AN521" will reject all combinations for that
+               # platform while ("AN521", "GNUARM") will only reject GCC ones
+ "invalid": [("MUSCA_A", "*", "*", "*", False),
+ ("MUSCA_B1", "*", "*", "*", False)]
+ }
+
+config_MUSCA_A = {"seed_params": {
+ "target_platform": ["MUSCA_A"],
+ "compiler": ["GNUARM"],
+ "proj_config": ["ConfigRegression",
+ "ConfigCoreIPC",
+ "ConfigCoreIPCTfmLevel2",
+ "ConfigDefault"],
+ "cmake_build_type": ["Debug", "Release"],
+ "with_mcuboot": [True],
+ },
+ "common_params": _common_tfm_builder_cfg,
+                  # invalid configurations can be added as tuples of adjustable
+                  # resolution: "AN521" will reject all combinations for that
+                  # platform while ("AN521", "GNUARM") will only reject GCC ones
+ "invalid": [("MUSCA_A", "*", "*", "*", False)]
+ }
+
+config_MUSCA_B1 = {"seed_params": {
+ "target_platform": ["MUSCA_B1"],
+ "compiler": ["GNUARM"],
+ "proj_config": ["ConfigRegression",
+ "ConfigCoreIPC",
+ "ConfigCoreIPCTfmLevel2",
+ "ConfigDefault"],
+ "cmake_build_type": ["Debug", "Release"],
+ "with_mcuboot": [True],
+ },
+ "common_params": _common_tfm_builder_cfg,
+                   # invalid configurations can be added as tuples of adjustable
+                   # resolution: "AN521" will reject all combinations for that
+                   # platform while ("AN521", "GNUARM") will only reject GCC ones
+ "invalid": [("MUSCA_B1", "*", "*", "*", False)]
+ }
+
+# Configuration used for document building
+config_doxygen = {"common_params": {
+ "config_type": "tf-m_documents",
+ "codebase_root_dir": "tf-m",
+ "build_cmds": {"all": ["cmake -G \"Unix Makefiles\" "
+ "-DPROJ_CONFIG=`readlink -f "
+ "%(_tbm_code_dir_)s/"
+ "configs/ConfigDefault.cmake` "
+ "-DTARGET_PLATFORM=AN521 "
+ "-DCOMPILER=GNUARM "
+ "-DCMAKE_BUILD_TYPE=Debug "
+ "-DBL2=True "
+ "%(_tbm_code_dir_)s/",
+ "cmake --build ./ -- install_doc",
+ "cmake --build ./ "
+ "-- install_userguide"]},
+ "artifact_capture_rex": r'%(_tbm_build_dir_)s/install/'
+ r'doc/reference_manual/(?:pdf|html)'
+ r'/(\w+\.(?:html|md|pdf))$',
+ },
+ "invalid": []
+ }
+
+# Configuration used in testing
+config_debug = {"seed_params": {
+ "target_platform": ["AN521"],
+ "compiler": ["ARMCLANG"],
+ "proj_config": ["ConfigDefault"],
+ "cmake_build_type": ["Debug"],
+ "with_mcuboot": [True],
+ },
+ "common_params": _common_tfm_builder_cfg,
+                # invalid configurations can be added as tuples of adjustable
+                # resolution: "AN521" will reject all combinations for that
+                # platform while ("AN521", "GNUARM") will only reject GCC ones
+ "invalid": [("*", "GNUARM", "*", "*", False),
+ ("AN521", "ARMCLANG", "ConfigRegression",
+ "Release", False),
+ ]
+ }
+
+_builtin_configs = {"full": config_full,
+ "an521": config_AN521,
+ "an519": config_AN519,
+ "musca_a": config_MUSCA_A,
+ "musca_b1": config_MUSCA_B1,
+ "ipc": config_IPC,
+ "doxygen": config_doxygen,
+ "debug": config_debug}
if __name__ == '__main__':
import os
- import sys
- try:
- from tfm_ci_pylib.utils import export_config_map
- except ImportError:
- dir_path = os.path.dirname(os.path.realpath(__file__))
- sys.path.append(os.path.join(dir_path, "../"))
- from tfm_ci_pylib.utils import export_config_map
- if len(sys.argv) == 2:
- if sys.argv[1] == "--export":
- export_config_map(_builtin_configs)
- if len(sys.argv) == 3:
- if sys.argv[1] == "--export":
- export_config_map(_builtin_configs, sys.argv[2])
+    # Default behavior is to export the reference configs when called
+ _dir = os.getcwd()
+ from utils import save_json
+ for _cname, _cfg in _builtin_configs.items():
+ _fname = os.path.join(_dir, _cname + ".json")
+ print("Exporting config %s" % _fname)
+ save_json(_fname, _cfg)
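
Note (illustrative sketch, not part of the patch): the config templates
above are expanded with plain Python %-substitution by the build manager.
For example, with one seed combination:

    from build_helper_configs import _common_tfm_builder_cfg

    params = {"codebase_root_dir": "tf-m",
              "proj_config": "ConfigRegression",
              "target_platform": "AN521",
              "compiler": "GNUARM",
              "cmake_build_type": "Debug",
              "with_mcuboot": True}
    print(_common_tfm_builder_cfg["config_template"] % params)
    # cmake -G "Unix Makefiles" -DPROJ_CONFIG=`readlink -f \
    #   tf-m/configs/ConfigRegression.cmake` -DTARGET_PLATFORM=AN521 \
    #   -DCOMPILER=GNUARM -DCMAKE_BUILD_TYPE=Debug -DBL2=True tf-m
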
diff --git a/checkpatch/checkpatch.conf b/checkpatch/checkpatch.conf
index e49ba0f..0a20b50 100644
--- a/checkpatch/checkpatch.conf
+++ b/checkpatch/checkpatch.conf
@@ -17,4 +17,5 @@
--ignore BRACES
--ignore CONST_STRUCT
--ignore INITIALISED_STATIC
+--ignore SPDX_LICENSE_TAG
--show-types
diff --git a/cppcheck/tfm-suppress-list.txt b/cppcheck/tfm-suppress-list.txt
index 368edb6..d59b338 100644
--- a/cppcheck/tfm-suppress-list.txt
+++ b/cppcheck/tfm-suppress-list.txt
@@ -53,3 +53,8 @@
//arm_cmse.h is a special system include, stop complaining about it.
missingInclude:*/tfm_core.h:11
missingInclude:*/tfm_secure_api.h:11
+
+//Exclude external qcbor code which does not comply with guidelines
+*:*/lib/ext/qcbor/*
+*:*/bl2/ext/*
+*:*/platform/ext/*
diff --git a/doc/.gitignore b/doc/.gitignore
index cb3200d..c059312 100644
--- a/doc/.gitignore
+++ b/doc/.gitignore
@@ -1,5 +1,5 @@
#-------------------------------------------------------------------------------
-# Copyright (c) 2017, Arm Limited and Contributors. All rights reserved.
+# Copyright (c) 2017-2019, Arm Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
diff --git a/fastmodel_dispatcher/AN519.py b/fastmodel_dispatcher/AN519.py
new file mode 100644
index 0000000..a03bf97
--- /dev/null
+++ b/fastmodel_dispatcher/AN519.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python3
+
+""" AN519.py:
+
+    Contains AN519-specific configuration variants. Each configuration is
+    created by a template (which defines the expected output of the test)
+    and a decorated class setting the parameters. The whole scope of this
+    module is imported into the config map, to avoid keeping a manual
+    list of the configurations up to date. """
+
+from __future__ import print_function
+import os
+import sys
+
+__copyright__ = """
+/*
+ * Copyright (c) 2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+try:
+ from tfm_ci_pylib.fastmodel_wrapper import FastmodelConfigMap
+ from tfm_ci_pylib.fastmodel_wrapper import config_variant
+
+ from tfm_ci_pylib.fastmodel_wrapper import \
+ template_default_config, template_regression_config, \
+ template_coreipc_config, template_coreipctfmlevel2_config
+except ImportError:
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+ from tfm_ci_pylib.fastmodel_wrapper import FastmodelConfigMap
+ from tfm_ci_pylib.fastmodel_wrapper import config_variant
+ from tfm_ci_pylib.fastmodel_wrapper import \
+ template_default_config, template_regression_config, \
+ template_coreipc_config, template_coreipctfmlevel2_config
+# ===================== AN519 Configuration Classes ======================
+# Configurations will be dynamically defined
+
+# ===================== Default Config ======================
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_armclang_configdefault_debug_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_gnuarm_configdefault_debug_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_armclang_configdefault_debug_nobl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_gnuarm_configdefault_debug_nobl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an519_armclang_configdefault_release_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an519_gnuarm_configdefault_release_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_armclang_configdefault_release_nobl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_gnuarm_configdefault_release_nobl2(template_default_config):
+ pass
+
+# ===================== Regressions Config ======================
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_armclang_configregression_debug_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_gnuarm_configregression_debug_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_armclang_configregression_debug_nobl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_gnuarm_configregression_debug_nobl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an519_armclang_configregression_release_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an519_gnuarm_configregression_release_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_armclang_configregression_release_nobl2(
+ template_regression_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_gnuarm_configregression_release_nobl2(template_regression_config):
+ pass
+
+# ===================== CoreIPC Config ======================
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_armclang_configcoreipc_debug_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_armclang_configcoreipc_debug_nobl2(template_coreipc_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an519_armclang_configcoreipc_release_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_armclang_configcoreipc_release_nobl2(template_coreipc_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_gnuarm_configcoreipc_debug_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_gnuarm_configcoreipc_debug_nobl2(template_coreipc_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an519_gnuarm_configcoreipc_release_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_gnuarm_configcoreipc_release_nobl2(template_coreipc_config):
+ pass
+
+# ===================== CoreIPCTfmLevel2 Config ======================
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_armclang_configcoreipctfmlevel2_debug_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_armclang_configcoreipctfmlevel2_debug_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an519_armclang_configcoreipctfmlevel2_release_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_armclang_configcoreipctfmlevel2_release_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an519_gnuarm_configcoreipctfmlevel2_debug_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an519_gnuarm_configcoreipctfmlevel2_debug_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an519_gnuarm_configcoreipctfmlevel2_release_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN519",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an519_gnuarm_configcoreipctfmlevel2_release_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+AN519 = FastmodelConfigMap(globals(), "AN519")
+
+if __name__ == "__main__":
+ pass
diff --git a/fastmodel_dispatcher/AN521.py b/fastmodel_dispatcher/AN521.py
new file mode 100644
index 0000000..8953aa1
--- /dev/null
+++ b/fastmodel_dispatcher/AN521.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python3
+
+""" AN521.py:
+
+    Contains AN521-specific configuration variants. Each configuration is
+    created by a template (which defines the expected output of the test)
+    and a decorated class setting the parameters. The whole scope of this
+    module is imported into the config map, to avoid keeping a manual
+    list of the configurations up to date. """
+
+from __future__ import print_function
+import os
+import sys
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+try:
+ from tfm_ci_pylib.fastmodel_wrapper import FastmodelConfigMap
+ from tfm_ci_pylib.fastmodel_wrapper import config_variant
+
+ from tfm_ci_pylib.fastmodel_wrapper import \
+ template_default_config, template_regression_config, \
+ template_coreipc_config, template_coreipctfmlevel2_config
+except ImportError:
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+ from tfm_ci_pylib.fastmodel_wrapper import FastmodelConfigMap
+ from tfm_ci_pylib.fastmodel_wrapper import config_variant
+ from tfm_ci_pylib.fastmodel_wrapper import \
+ template_default_config, template_regression_config, \
+ template_coreipc_config, template_coreipctfmlevel2_config
+
+# ===================== AN521 Configuration Classes ======================
+# Configurations will be dynamically defined
+
+# ===================== Default Config ======================
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_armclang_configdefault_debug_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_gnuarm_configdefault_debug_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_armclang_configdefault_debug_nobl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_gnuarm_configdefault_debug_nobl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an521_armclang_configdefault_release_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an521_gnuarm_configdefault_release_bl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_armclang_configdefault_release_nobl2(template_default_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_gnuarm_configdefault_release_nobl2(template_default_config):
+ pass
+
+# ===================== Regressions Config ======================
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_armclang_configregression_debug_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_gnuarm_configregression_debug_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_armclang_configregression_debug_nobl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_gnuarm_configregression_debug_nobl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an521_armclang_configregression_release_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an521_gnuarm_configregression_release_bl2(template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_armclang_configregression_release_nobl2(
+ template_regression_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_gnuarm_configregression_release_nobl2(template_regression_config):
+ pass
+
+# ===================== CoreIPC Config ======================
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_armclang_configcoreipc_debug_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_armclang_configcoreipc_debug_nobl2(template_coreipc_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an521_armclang_configcoreipc_release_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_armclang_configcoreipc_release_nobl2(template_coreipc_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_gnuarm_configcoreipc_debug_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_gnuarm_configcoreipc_debug_nobl2(template_coreipc_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an521_gnuarm_configcoreipc_release_bl2(template_coreipc_config):
+    pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_gnuarm_configcoreipc_release_nobl2(template_coreipc_config):
+ pass
+
+# ===================== CoreIPCTfmLevel2 Config ======================
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_armclang_configcoreipctfmlevel2_debug_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_armclang_configcoreipctfmlevel2_debug_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="BL2")
+class an521_armclang_configcoreipctfmlevel2_release_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="ARMCLANG",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_armclang_configcoreipctfmlevel2_release_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="BL2")
+class an521_gnuarm_configcoreipctfmlevel2_debug_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Debug",
+ bootloader="NOBL2")
+class an521_gnuarm_configcoreipctfmlevel2_debug_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="BL2")
+class an521_gnuarm_configcoreipctfmlevel2_release_bl2(template_coreipctfmlevel2_config):
+ pass
+
+
+@config_variant(platform="AN521",
+ compiler="GNUARM",
+ build_type="Release",
+ bootloader="NOBL2")
+class an521_gnuarm_configcoreipctfmlevel2_release_nobl2(template_coreipctfmlevel2_config):
+ pass
+
+AN521 = FastmodelConfigMap(globals(), "AN521")
+
+if __name__ == "__main__":
+ pass
diff --git a/fastmodel_dispatcher/fastmodel_dispatcher.py b/fastmodel_dispatcher/fastmodel_dispatcher.py
new file mode 100644
index 0000000..8c12dae
--- /dev/null
+++ b/fastmodel_dispatcher/fastmodel_dispatcher.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python3
+
+""" fastmodel_dispatcher.py:
+
+    The fastmodel dispatcher takes a build report produced by build_helper
+    as input and selects the appropriate tests, launched in separate
+    fastmodel wrapper instances """
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+import os
+import sys
+import argparse
+from copy import deepcopy
+from fastmodel_dispatcher_configs import fvp_config_map
+
+try:
+ from tfm_ci_pylib.utils import load_json, print_test, save_json, \
+ show_progress
+ from tfm_ci_pylib.fastmodel_wrapper import FastmodelWrapper
+ from tfm_ci_pylib.tfm_build_manager import TFM_Build_Manager
+
+except ImportError:
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+ from tfm_ci_pylib.utils import load_json, print_test, save_json, \
+ show_progress
+ from tfm_ci_pylib.fastmodel_wrapper import FastmodelWrapper
+ from tfm_ci_pylib.tfm_build_manager import TFM_Build_Manager
+
+
+def cfilter(config_list, match):
+    """Filter a list of items in _text1_text2_ format and only include
+    results that contain the match term between two underscores """
+
+ # Ensure the match has the format of _text_
+ match = "_%s_" % match.strip("_")
+
+ return [n for n in config_list if match in n]
+
+
+def main(user_args):
+ """ Main logic """
+
+ test_config_list = None
+
+ if user_args.list_cfg:
+ print("Built-in configs:")
+ print("\n".join(fvp_config_map.list()))
+ sys.exit(0)
+ elif user_args.single_cfg:
+ try:
+ # Try to fetch the config to validate it exists
+ fvp_config_map.get_config(user_args.single_cfg)
+ test_config_list = [user_args.single_cfg]
+ except Exception as e:
+ print("Error: %s" % e)
+ sys.exit(1)
+ elif user_args.build_all:
+ test_config_list = fvp_config_map.list()
+ # If a build report is provided parse it
+ elif user_args.build_report:
+ build_report = load_json(user_args.build_report)
+
+ build_cfg = build_report["_metadata_"]["input_build_cfg"]
+
+    # build and test configs share common key name entries
+ config_list = list(map(str.lower,
+ (map(str, build_report["report"].keys()))))
+
+ # Only choose the tests that have been defined in the map
+ test_config_list = [n for n in fvp_config_map.list()
+ if n in config_list]
+
+        # Use the build manager to calculate the rejection list in the
+        # same manner.
+ rj = TFM_Build_Manager.generate_rejection_list(
+ build_cfg["seed_params"],
+ build_cfg["common_params"],
+ fvp_config_map.get_invalid()).keys()
+
+ # Remove every config that is included in the rejection.
+ # Ignore generated rejection configs that have not been set in the
+ # test map.
+ for name in rj:
+ name = name.lower()
+ try:
+ test_config_list.pop(test_config_list.index(name))
+ print("Rejecting config %s" % name)
+ except Exception as e:
+ print("Rejection ignored with exception:", e)
+ else:
+        print("Nothing to do. Please provide a report or a config name to test")
+ sys.exit(1)
+
+ # Apply filters if specified by user
+ if user_args.build_armclang:
+ test_config_list = cfilter(test_config_list, "armclang")
+ elif user_args.build_gnuarm:
+ test_config_list = cfilter(test_config_list, "gnuarm")
+ elif user_args.filter:
+ test_config_list = cfilter(test_config_list, user_args.filter)
+ else:
+ pass
+
+ print("Working on Test list: \n%s" % "\n".join(sorted(test_config_list)))
+
+ if user_args.p_command:
+
+ for test_cfg in test_config_list:
+
+ test_cfg_obj = fvp_config_map.get_config_object(test_cfg)
+ _tmp_cfg = FastmodelWrapper(fvp_cfg=test_cfg_obj.get_config())
+
+ print("\nCommand line:")
+ print("")
+ _tmp_cfg.show_cmd()
+ print("\n")
+ sys.exit(0)
+
+ # Run tests
+ rep = []
+ test_count = 0
+ for test_cfg in test_config_list:
+
+        # Check if the config's hardcoded binary path is the same as the
+        # one in the build report. If not, update the config
+ test_cfg_obj = fvp_config_map.get_config_object(test_cfg)
+
+ rep.append(FastmodelWrapper(
+ fvp_cfg=test_cfg_obj.get_config())
+ .start().block_wait().test().save_report().get_report())
+ test_count += 1
+ print("Testing progress:")
+ show_progress(test_count, len(test_config_list))
+
+    # Export the report to a file
+ if user_args.report:
+ f_report = {"report": {}, "_metadata_": {}}
+ f_report["report"] = {k["name"]: deepcopy(k) for k in rep}
+ save_json(user_args.report, f_report)
+
+ sl = [x["name"] for x in rep if x["success"] is True]
+ fl = [x["name"] for x in rep if x["success"] is False]
+
+ print("\n")
+
+ if sl:
+ print_test(t_list=sl, status="passed", tname="Tests")
+ if fl:
+ print_test(t_list=fl, status="failed", tname="Tests")
+ if user_args.eif:
+ sys.exit(1)
+
+
+def get_cmd_args():
+ """ Parse command line arguments """
+
+ # Parse command line arguments to override config
+ parser = argparse.ArgumentParser(description="TFM Fastmodel wrapper.")
+ parser.add_argument("-b", "--build_report",
+ dest="build_report",
+ action="store",
+ help="JSON file produced by build_helper (input)")
+ parser.add_argument("-a", "--build_all",
+ dest="build_all",
+ action="store_true",
+                        help="If set, run every configuration combination")
+ parser.add_argument("-e", "--error_if_failed",
+ dest="eif",
+ action="store_true",
+ help="If set will change the script exit code if one "
+ "or more tests fail")
+ parser.add_argument("-r", "--report",
+ dest="report",
+ action="store",
+ help="JSON file containing fastmodel report (output)")
+ parser.add_argument("-g", "--build_gnuarm",
+ dest="build_gnuarm",
+ action="store_true",
+                        help="If set, run every gnuarm configuration")
+ parser.add_argument("-c", "--build_armclang",
+ dest="build_armclang",
+ action="store_true",
+                        help="If set, run every armclang configuration")
+ parser.add_argument("-f", "--filter",
+ dest="filter",
+ action="store",
+ help="Only select configs that contain this string")
+ parser.add_argument("-l", "--list-configs",
+ dest="list_cfg",
+ action="store_true",
+                        help="Print a list of the built-in configurations "
+                        "and exit")
+ parser.add_argument("-s", "--single-config",
+ dest="single_cfg",
+ action="store",
+ help="Launch testing for a single built-in config, "
+ "picked by name")
+ parser.add_argument("-p", "--print-command",
+ dest="p_command",
+ action="store_true",
+                        help="Print the FVP launch command to console & exit")
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ main(get_cmd_args())
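
For reference, cfilter above is a small pure helper; a self-contained
illustration of its matching rule (config names are examples only):

    def cfilter(config_list, match):
        # Keep entries that contain "_<match>_" (same logic as the patch).
        match = "_%s_" % match.strip("_")
        return [n for n in config_list if match in n]

    configs = ["an521_gnuarm_configdefault_debug_bl2",
               "an521_armclang_configdefault_debug_bl2"]
    print(cfilter(configs, "gnuarm"))
    # -> ['an521_gnuarm_configdefault_debug_bl2']
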
diff --git a/fastmodel_dispatcher/fastmodel_dispatcher_configs.py b/fastmodel_dispatcher/fastmodel_dispatcher_configs.py
new file mode 100644
index 0000000..4b698a9
--- /dev/null
+++ b/fastmodel_dispatcher/fastmodel_dispatcher_configs.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+
+""" fastmodel_dispatcher_configs.py:
+
+    Uses the Python class inheritance model to generate modular and easily
+    scalable configuration models for the fastmodel_dispatcher module.
+    Configuration data is also combined with helper methods. If the file
+    is run as a standalone script, it can save json configuration files
+    to disk when requested by the --export directive """
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+import sys
+from AN521 import AN521
+from AN519 import AN519
+
+fvp_config_map = AN521 + AN519
+
+if __name__ == "__main__":
+ # Create Json configuration files on user request
+
+ if len(sys.argv) >= 2:
+ if sys.argv[1] == "--export":
+
+ for platform in fvp_config_map.get_object_map().values():
+ for config in platform.values():
+ config.json_to_file()
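
A minimal usage sketch (illustrative): the merged map exposes the same
list() interface that fastmodel_dispatcher uses to enumerate test configs:

    from fastmodel_dispatcher_configs import fvp_config_map

    # Print every AN521/AN519 test configuration known to the map.
    for name in sorted(fvp_config_map.list()):
        print(name)
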
diff --git a/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2 b/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
index b397065..b80884c 100644
--- a/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
+++ b/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
@@ -14,7 +14,7 @@
minutes: {{ action_timeout }}
actions:
power-off:
- seconds: 30
+ seconds: {{ poweroff_timeout }}
connections:
lava-test-monitor:
minutes: {{ monitor_timeout }}
@@ -23,30 +23,30 @@
visibility: public
actions:
-{%- for platform, recovery in platforms.items() %}
+{%- for platform, recovery in platforms.items()|sort(reverse=false) %}
- deploy:
to: mps
images:
recovery_image:
- url: {{ recovery_store_url }}/lastSuccessfulBuild/artifact/{{ recovery }}
+ url: {{ recovery_store_url }}/{{ build_no }}/artifact/{{ recovery }}
compression: gz
- namespace: target
- {%- for compiler in compilers %}
- {%- for build_type in build_types %}
- {%- for boot_type in boot_types %}
- {% for name, test in tests.items() %}
+ namespace: target_{{ recovery | replace(".tar.gz", "") }}
+{% for compiler in compilers|sort(reverse=true) %}
+ {%- for build_type in build_types|sort(reverse=false) %}
+ {%- for boot_type in boot_types|sort(reverse=false) %}
+ {%- for name, test in tests.items()|sort(reverse=false) %}
- deploy:
to: mps
images:
test_binary:
- url: {{artifact_store_url}}/{{ build_no}}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/{{ test.binaries.firmware }}
+ url: {{artifact_store_url}}/{{ build_no }}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/install/outputs/{{ platform }}/{{ test.binaries.firmware }}
namespace: target
- deploy:
to: mps
images:
test_binary:
- url: {{artifact_store_url}}/{{ build_no}}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/{{ test.binaries.bootloader }}
+ url: {{artifact_store_url}}/{{ build_no }}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/install/outputs/{{ platform }}/{{ test.binaries.bootloader }}
namespace: target
- boot:
@@ -66,8 +66,9 @@
fixupdict:
'{{monitor.fixup.pass}}': pass
'{{monitor.fixup.fail}}': fail
+
{%- endfor %}
- {% endfor %}
+ {%- endfor %}
{%- endfor %}
{%- endfor %}
{%- endfor %}
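
For illustration only (lava_helper drives this in practice and supplies
the full variable set; all values below are placeholders), the template
can be rendered with the standard jinja2 API:

    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader("lava_helper/jinja2_templates"))
    templ = env.get_template("template_tfm_mps2_sse_200.jinja2")
    print(templ.render(action_timeout=60,
                       monitor_timeout=90,
                       poweroff_timeout=10,
                       build_no=42,
                       recovery_store_url="https://ci.example.org/job/fpga-store",
                       artifact_store_url="https://ci.example.org/job/build",
                       platforms={"AN521": "mps2_an521_v3.0.tar.gz"},
                       compilers=["GNUARM"],
                       build_types=["Debug"],
                       boot_types=["BL2"],
                       tests={}))
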
diff --git a/lava_helper/lava_helper.py b/lava_helper/lava_helper.py
index 4e8ed88..783ed04 100755
--- a/lava_helper/lava_helper.py
+++ b/lava_helper/lava_helper.py
@@ -19,7 +19,7 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import os
import sys
@@ -31,13 +31,13 @@
try:
from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
- load_yaml, test
+ load_yaml, test, print_test
from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
except ImportError:
dir_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(dir_path, "../"))
from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
- load_yaml, test
+ load_yaml, test, print_test
from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
@@ -165,26 +165,60 @@
# Call the formatter
list(map(format_results, test_results))
+    # Remove the ignored configs if requested
+ if user_args.ignore_configs:
+ print(user_args.ignore_configs)
+ for cfg in user_args.ignore_configs:
+ try:
+ print("Rejecting config: ", cfg)
+ t_dict.pop(cfg)
+ except KeyError as e:
+ print("Warning! Rejected config %s not found"
+ " in LAVA results" % cfg)
+
# We need to check that each of the tests contained in the test_map exist
# AND that they have a passed status
t_sum = 0
+
+ with open("lava_job.url", "r") as F:
+ job_url = F.read().strip()
+
+ out_rep = {"report": {},
+ "_metadata_": {"job_url": job_url}}
for k, v in t_dict.items():
try:
- t_sum += int(test(test_map[k],
- v,
- pass_text=["pass"],
- error_on_failed=False,
- test_name=k,
- summary=user_args.lava_summary)["success"])
+ out_rep["report"][k] = test(test_map[k],
+ v,
+ pass_text=["pass"],
+ error_on_failed=False,
+ test_name=k,
+ summary=user_args.lava_summary)
+ t_sum += int(out_rep["report"][k]["success"])
# Status can be None if a test did't fully run/complete
except TypeError as E:
t_sum = 1
+ print("\n")
+ sl = [x["name"] for x in out_rep["report"].values()
+ if x["success"] is True]
+ fl = [x["name"] for x in out_rep["report"].values()
+ if x["success"] is False]
+
+ if sl:
+ print_test(t_list=sl, status="passed", tname="Tests")
+ if fl:
+ print_test(t_list=fl, status="failed", tname="Tests")
+
+    # Generate the output report if requested
+ if user_args.output_report:
+ save_json(user_args.output_report, out_rep)
# Every single of the tests need to have passed for group to succeed
if t_sum != len(t_dict):
print("Group Testing FAILED!")
- sys.exit(1)
- print("Group Testing PASS!")
+ if user_args.eif:
+ sys.exit(1)
+ else:
+ print("Group Testing PASS!")
def test_lava_dispatch_credentials(user_args):
@@ -226,10 +260,19 @@
lava = test_lava_dispatch_credentials(user_args)
job_id, job_url = lava.submit_job(user_args.dispatch)
- print("Job submitted at: " + job_url)
+
+    # The reason for failure will be reported to the user by LAVA_RPC_connector
+ if job_id is None and job_url is None:
+ sys.exit(1)
+ else:
+ print("Job submitted at: " + job_url)
+
with open("lava_job.id", "w") as F:
F.write(str(job_id))
print("Job id %s stored at lava_job.id file." % job_id)
+ with open("lava_job.url", "w") as F:
+ F.write(str(job_url))
+    print("Job url %s stored at lava_job.url file." % job_url)
# Wait for the job to complete
status = lava.block_wait_for_job(job_id, int(user_args.dispatch_timeout))
@@ -270,6 +313,18 @@
config["build_no"] = user_args.build_no
+ # Override with command line provided URL/Job Name
+ if user_args.jenkins_url:
+ _over_d = {"jenkins_url": user_args.jenkins_url,
+ "jenkins_job": "%(jenkins_job)s"}
+ config["recovery_store_url"] = config["recovery_store_url"] % _over_d
+ config["artifact_store_url"] = config["artifact_store_url"] % _over_d
+
+ if user_args.jenkins_job:
+ _over_d = {"jenkins_job": user_args.jenkins_job}
+ config["recovery_store_url"] = config["recovery_store_url"] % _over_d
+ config["artifact_store_url"] = config["artifact_store_url"] % _over_d
+
# Add the template folder
config["templ"] = os.path.join(user_args.template_dir, config["templ"])
return config
@@ -289,6 +344,8 @@
save_config(config_file, lava_gen_config_map[config_key])
print("Configuration exported at %s" % config_file)
return
+ if user_args.dispatch is not None or user_args.dispatch_cancel is not None:
+ pass
else:
config = load_config_overrides(user_args)
@@ -372,7 +429,18 @@
dest="platform",
action="store",
help="Override platform.Only the provided one "
- "will be tested ")
+ "will be tested")
+ over_g.add_argument("-ou", "--override-jenkins-url",
+ dest="jenkins_url",
+ action="store",
+ help="Override %(jenkins_url)s params in config if "
+                        "present. Sets the jenkins address including "
+ "port")
+ over_g.add_argument("-oj", "--override-jenkins-job",
+ dest="jenkins_job",
+ action="store",
+ help="Override %(jenkins_job)s params in config if "
+                        "present. Sets the jenkins job name")
parse_g.add_argument("-tp", "--task-lava-parse",
dest="lava_results",
action="store",
@@ -381,9 +449,24 @@
" of testing")
parse_g.add_argument("-ls", "--lava-parse-summary",
dest="lava_summary",
- default=True,
+ default=False,
action="store_true",
help="Print full test summary")
+ parse_g.add_argument("-or", "--output-report",
+ dest="output_report",
+ action="store",
+                         help="Save the full test report to a JSON file")
+ parser.add_argument("-ef", "--error-if-failed",
+ dest="eif",
+ action="store_true",
+ help="If set will change the script exit code if one "
+ "or more tests fail")
+ parser.add_argument('-ic', '--ignore-configs',
+ dest="ignore_configs",
+ nargs='+',
+                        help="Pass a space separated list of build "
+                        "configurations which will get ignored when "
+                        "evaluating LAVA results")
# Lava job control commands
disp_g.add_argument("-td", "--task-dispatch",
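
As an aside (sketch with placeholder values), the URL overrides rely on
two-stage %-substitution: the first pass fills in %(jenkins_url)s while
re-emitting the %(jenkins_job)s placeholder, so the job name can be
substituted independently later:

    url = "%(jenkins_url)s/job/%(jenkins_job)s"
    url = url % {"jenkins_url": "https://ci.example.org:8080",
                 "jenkins_job": "%(jenkins_job)s"}  # keep job placeholder
    # -> 'https://ci.example.org:8080/job/%(jenkins_job)s'
    url = url % {"jenkins_job": "tf-m-build-test-nightly"}
    # -> 'https://ci.example.org:8080/job/tf-m-build-test-nightly'
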
diff --git a/lava_helper/lava_helper_configs.py b/lava_helper/lava_helper_configs.py
index 100b0ed..a5cf8c1 100644
--- a/lava_helper/lava_helper_configs.py
+++ b/lava_helper/lava_helper_configs.py
@@ -18,7 +18,7 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
def lava_gen_get_config_subset(config,
@@ -35,6 +35,9 @@
# Remove all configs not requests by the caller
if not default:
tests.pop("Default")
+ if not core:
+ tests.pop("CoreIPC")
+ tests.pop("CoreIPCTfmLevel2")
if not regression:
tests.pop("Regression")
@@ -45,26 +48,24 @@
tfm_mps2_sse_200 = {
"templ": "template_tfm_mps2_sse_200.jinja2",
"job_name": "mps2plus-arm-tfm",
- "device_type": "mps",
- "job_timeout": 60,
- "action_timeout": 60,
- "monitor_timeout": 60,
- "recovery_store_url": "https://ci.trustedfirmware.org/"
- "job/tf-m-fpga-image-store",
- "artifact_store_url": "https://ci.trustedfirmware.org/"
- "job/tf-m-build-test-review",
- "platforms": {"AN521": "mps2_sse200_an512.tar.gz"},
+ "device_type": "mps2plus",
+ "job_timeout": 120,
+ "action_timeout": 90,
+ "monitor_timeout": 90,
+ "poweroff_timeout": 10,
+ "recovery_store_url": "%(jenkins_url)s/"
+ "job/%(jenkins_job)s",
+ "artifact_store_url": "%(jenkins_url)s/"
+ "job/%(jenkins_job)s",
+ "platforms": {"AN521": "mps2_an521_v3.0.tar.gz"},
"compilers": ["GNUARM"],
- "build_types": ["Debug"],
+ "build_types": ["Debug", "Release"],
"boot_types": ["BL2"],
"tests": {
'Default': {
- "recovery": "mps2_sse200_an512.tar.gz",
"binaries": {
- "firmware":
- "install/outputs/AN521/tfm_sign.bin",
- "bootloader":
- "install/outputs/AN521/mcuboot.bin"
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
},
"monitors": [
{
@@ -80,10 +81,9 @@
]
}, # Default
'Regression': {
- "recovery": "mps2_sse200_an512.tar.gz",
"binaries": {
- "firmware": "install/outputs/AN521/tfm_sign.bin",
- "bootloader": "install/outputs/AN521/mcuboot.bin"
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
},
"monitors": [
{
@@ -131,6 +131,42 @@
}
] # Monitors
}, # Regression
+ 'CoreIPC': {
+ "binaries": {
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': 'Jumping to the first image slot',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+                }
+            ] # Monitors
+ }, # CoreIPC
+ 'CoreIPCTfmLevel2': {
+ "binaries": {
+ "firmware": "tfm_sign.bin",
+ "bootloader": "mcuboot.bin"
+ },
+ "monitors": [
+ {
+ 'name': 'Secure_Test_Suites_Summary',
+ 'start': 'Jumping to the first image slot',
+ 'end': '\\x1b\\\[0m',
+ 'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+ r'(?P<test_case_id>Secure image '
+ r'initializing)(?P<result>!)',
+ 'fixup': {"pass": "!", "fail": ""},
+ 'required': ["secure_image_initializing"]
+                }
+            ] # Monitors
+ }, # CoreIPCTfmLevel2
} # Tests
}
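
A usage sketch (parameter names are inferred from the function body above
and may not match the real signature exactly):

    # Keep only the Regression tests from the MPS2 config.
    subset = lava_gen_get_config_subset(tfm_mps2_sse_200,
                                        default=False,
                                        core=False,
                                        regression=True)
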
diff --git a/make_checkpatch_summary.sh b/make_checkpatch_summary.sh
deleted file mode 100755
index 8e068af..0000000
--- a/make_checkpatch_summary.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-#-------------------------------------------------------------------------------
-# Copyright (c) 2018-2019, Arm Limited and Contributors. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-
-##
-##@file
-##@brief This script is to make a summary of run-checkpatch.sh generated output
-##files.
-##
-##The generated summary will hold the number of error and warning messages for
-##each file.
-##
-##The first parameter of the script must be the location of input file.
-##
-
-#Check parameter
-if [ -z ${1+x} ]
-then
- echo "Checkpatch output file not specified!"
- exit 1
-fi
-
-infile="$1"
-
-#Find the summary line for each file. Cut the summary line plus the file name
-#the previous line.
-#Concatenate the current line to the previos one,
-#Print the two lines match the following regexp:
-# remember anything any number of non : characters (this is the file path)
-# followed by a :
-# match any nuber of following characters till "total:" is found
-# remember all characters after "total:" (this is the summary)
-# replace the matched string with first and and the second match concatenated
-# with new line and a tab character in between.
-# we use s: single line and m: multi line modificators for the regexp match
-res=$(perl -ne '$l=$l.$_; print "$l" if $l=~s/.*?([^:]+):.*\ntotal:(.*)/$1:\n\t$2/sm;$l=$_;' "$infile")
-
-#Print the result to standard output.
-cat <<EOM
-Checkpatch result summary:
-$res
-EOM
diff --git a/make_cppcheck_summary.sh b/make_cppcheck_summary.sh
deleted file mode 100755
index aff1909..0000000
--- a/make_cppcheck_summary.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-#-------------------------------------------------------------------------------
-# Copyright (c) 2018-2019, Arm Limited and Contributors. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-
-#Fail if any executed command fails.
-set -e
-
-##
-##@file
-##@brief This script is to make a summary of cppcheck XML output files.
-##
-##The generated summary will hold the number of messages of each severity type.
-##
-##The first parameter of the script must be the location of the XML file.
-##
-##The script uses regual expressions to identify and count messages.
-##
-##Usage:
-## command | result
-## --------|-------
-## make_cppcheck_summary.sh foo/bar/build.xml | Summary text.
-##
-
-#Check parameter
-if [ -z ${1+x} ]
-then
- echo "Cppcheck output file not specified!"
- exit 1
-fi
-
-xml_file="$1"
-
-#List of error types cmake reports.
-severity_list=( "none" "error" "warning" "style" "performance" "portability"
- "information" "debug")
-
-#Count each severity type and build result message.
-for severity in "${severity_list[@]}"
-do
- #Count lines with this severity type.
- n=$(grep -c "severity=\"$severity\"" "$xml_file" || true)
- #Start of report line
- line=$'\n\tIssues with severity '"\"$severity\":"
- #Indentatin to character position 46.
- indent=$(eval "printf ' %.0s' {1..$(( 46-${#line} ))}")
- #Add identation and number
- line="$line$indent$n"
- #Extend issue list
- issue_list="$issue_list$line"
-done
-msg="Cppcheck results: $issue_list"
-
-echo "$msg"
diff --git a/report_parser/report_parser.py b/report_parser/report_parser.py
new file mode 100644
index 0000000..859ebe7
--- /dev/null
+++ b/report_parser/report_parser.py
@@ -0,0 +1,573 @@
+#!/usr/bin/env python3
+
+""" report_parser.py:
+
+    Report parser parses openci json reports and conveys the information
+    in one or more standard formats (to be implemented)
+
+    After all information is captured it validates the success/failure
+    status and can change the script exit code for integration with
+    standard CI executors.
+ """
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+import os
+import re
+import sys
+import json
+import argparse
+from pprint import pprint
+
+try:
+ from tfm_ci_pylib.utils import load_json, get_local_git_info, \
+ save_json, list_subdirs, get_remote_git_info, \
+ convert_git_ref_path, xml_read
+except ImportError:
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+
+ from tfm_ci_pylib.utils import load_json, get_local_git_info, \
+ save_json, list_subdirs, get_remote_git_info, \
+ convert_git_ref_path, xml_read
+
+
+def split_keys(joint_arg, sep="="):
+ """ Split two keys spread by a separator, and return them as a tuple
+ with whitespace removed """
+
+ keys = joint_arg.split(sep)
+
+    # Remove whitespace
+    keys = list(map(str.strip, keys))
+    # If a key contains the word True/False convert it to a native bool
+    keys = [{"true": True, "false": False}.get(x.lower(), x) for x in keys]
+    return keys
+
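+# Illustrative behaviour of the conversion (doctest-style, not executed):
+#
+#   >>> split_keys("status = Success")
+#   ['status', 'Success']
+#   >>> split_keys("success = True")
+#   ['success', True]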
+
+def dependencies_mdt_collect(path_list,
+ out_f=None,
+ expected_paths=["mbedtls",
+ "mbedcrypto",
+ "cmsis",
+ "checkpatch"]):
+ """ Collect dependencies checkout metadata. It creates a json report which
+ can be optionally exported to a file """
+
+ cpaths = {k: v for k, v in [n.split("=") for n in path_list]}
+ cwd = os.path.abspath(os.getcwd())
+
+ # Create an empty dataset
+ data = {n: {} for n in set(expected_paths).union(set(cpaths.keys()))}
+
+ # Perform basic sanity check
+ if not set(data.keys()).issubset(set(cpaths.keys())):
+ err_msg = "Error locating required paths.\nNeeded: %s\nHas: %s" % (
+ ",".join(data.keys()), ",".join(cpaths.keys())
+ )
+ print(err_msg)
+ raise Exception(err_msg)
+
+ for d in list_subdirs(cpaths["mbedtls"]):
+ print("mbedtls dir: ", d)
+ # if checkout directory name contains a git reference convert to short
+ d = convert_git_ref_path(d)
+
+ git_info = get_local_git_info(d)
+ tag = os.path.split(git_info["dir"])[-1].split("-")[-1]
+
+        # Absolute paths will not work in Jenkins since it changes the
+        # workspace directory between stages; convert to a relative path
+ git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
+ data["mbedtls"][tag] = git_info
+
+ for d in list_subdirs(cpaths["mbedcrypto"]):
+ print("mbed-crypto dir: ", d)
+ # if checkout directory name contains a git reference convert to short
+ d = convert_git_ref_path(d)
+
+ git_info = get_local_git_info(d)
+ tag = os.path.split(git_info["dir"])[-1].split("-")[-1]
+
+        # Absolute paths will not work in Jenkins since it changes the
+        # workspace directory between stages; convert to a relative path
+ git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
+ data["mbedcrypto"][tag] = git_info
+
+ for d in list_subdirs(cpaths["cmsis"]):
+ print("CMS subdir: ", d)
+ d = convert_git_ref_path(d)
+ git_info = get_local_git_info(d)
+ tag = os.path.split(git_info["dir"])[-1]
+
+        # Absolute paths will not work in Jenkins since it changes the
+        # workspace directory between stages; convert to a relative path
+ git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
+ data["cmsis"][tag] = git_info
+
+ if "fastmodel" in cpaths:
+ for d in list_subdirs(cpaths["fastmodel"]):
+ print("Fastmodel subdir:", d)
+ json_info = load_json(os.path.join(d, "version.info"))
+ json_info["dir"] = os.path.relpath(d, cwd)
+
+ tag = json_info["version"]
+            # Absolute paths will not work in Jenkins since it changes the
+            # workspace directory between stages; convert to a relative path
+ data["fastmodel"][tag] = json_info
+
+ for d in list_subdirs(cpaths["checkpatch"]):
+ print("Checkpatch subdir:", d)
+
+ with open(os.path.join(d, "version.info"), "r") as F:
+ url = F.readline().strip()
+
+ git_info = get_remote_git_info(url)
+ d = convert_git_ref_path(d)
+ git_info['dir'] = d
+ tag = os.path.split(git_info["dir"])[-1].split("_")[-1]
+
+        # Absolute paths will not work in Jenkins since it changes the
+        # workspace directory between stages; convert to a relative path
+ git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
+ data["checkpatch"][tag] = git_info
+ if "fpga" in cpaths:
+ for d in os.listdir(cpaths["fpga"]):
+ print("FPGA imagefile:", d)
+ if ".tar.gz" in d:
+ name = d.split(".tar.gz")[0]
+ platform, subsys, ver = name.split("_")
+ data["fpga"][name] = {"platform": platform,
+ "subsys": subsys,
+ "version": ver,
+ "recovery": os.path.join(cpaths["fpga"],
+ d)}
+ if out_f:
+ print("Exporting metadata to", out_f)
+ save_json(out_f, data)
+ else:
+ pprint(data)
+
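+# Illustrative call (paths hypothetical): collect checkout metadata for the
+# standard dependencies and export it to a json report:
+#
+#   dependencies_mdt_collect(["mbedtls=deps/mbedtls",
+#                             "mbedcrypto=deps/mbedcrypto",
+#                             "cmsis=deps/cmsis",
+#                             "checkpatch=deps/checkpatch"],
+#                            out_f="dependencies_report.json")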
+
+def cppcheck_mdt_collect(file_list, out_f=None):
+ """ XML parse multiple cppcheck output files and create a json report """
+
+ xml_files = list(map(os.path.abspath, file_list))
+
+ dict_data = []
+ version = None
+ for xf in xml_files:
+ data = xml_read(xf)
+
+ version = data["results"]["cppcheck"]["@version"]
+ # If nothing is found the errors dictionary will be a Nonetype object
+ if data["results"]["errors"] is not None:
+ # Use json to flatten ordered dict
+ str_data = json.dumps(data["results"]["errors"]["error"])
+ # Remove @ prefix on first char of files that cppcheck adds
+ str_data = str_data.replace("@", '')
+
+            # Convert to dict again (xml to json will have added an array)
+ _dt = json.loads(str_data)
+
+ if isinstance(_dt, list):
+ dict_data += _dt
+            # If only one error is found it will give it as a single item
+ elif isinstance(_dt, dict):
+ dict_data += [_dt]
+ else:
+ print("Ignoring cpp entry %s of type %s" % (_dt, type(_dt)))
+
+ out_data = {"_metadata_": {"cppcheck-version": version},
+ "report": {}}
+
+ for E in dict_data:
+
+ sever = E.pop("severity")
+
+        # Sort it based on severity
+ try:
+ out_data["report"][sever].append(E)
+ except KeyError:
+ out_data["report"][sever] = [E]
+
+ _errors = 0
+ for msg_sever, msg_sever_entries in out_data["report"].items():
+ out_data["_metadata_"][msg_sever] = str(len(msg_sever_entries))
+ if msg_sever == "error":
+ _errors = len(msg_sever_entries)
+
+ out_data["_metadata_"]["success"] = True if not int(_errors) else False
+
+ if out_f:
+ save_json(out_f, out_data)
+ else:
+ pprint(out_data)
+
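+# Illustrative shape of the generated report (counts hypothetical):
+#
+#   {"_metadata_": {"cppcheck-version": "1.87",
+#                   "error": "2",
+#                   "style": "15",
+#                   "success": False},
+#    "report": {"error": [...], "style": [...]}}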
+
+def checkpatch_mdt_collect(file_name, out_f=None):
+ """ Regex parse a checpatch output file and create a report """
+
+ out_data = {"_metadata_": {"errors": 0,
+ "warnings": 0,
+ "lines": 0,
+ "success": True},
+ "report": {}
+ }
+ with open(file_name, "r") as F:
+ cpatch_data = F.read().strip()
+
+ # checkpatch will not report anything when no issues are found
+ if len(cpatch_data):
+ stat_rex = re.compile(r'^total: (\d+) errors, '
+ r'(\d+) warnings, (\d+) lines',
+ re.MULTILINE)
+ line_rex = re.compile(r'([\S]+:)\s([\S]+:)\s([\S ]+)\n', re.MULTILINE)
+ ewl = stat_rex.search(cpatch_data)
+ try:
+ _errors, _warnings, _lines = ewl.groups()
+ except Exception as E:
+ print("Exception parsing checkpatch file.", E)
+            # If there is text but not in known format return -1 and fail job
+ _errors = _warnings = _lines = "-1"
+        checkpatch_entries = line_rex.findall(cpatch_data)
+
+        for en in checkpatch_entries:
+ _file, _line, _ = en[0].split(":")
+ _type, _subtype, _ = en[1].split(":")
+ _msg = en[2]
+
+ out_data["_metadata_"] = {"errors": _errors,
+ "warnings": _warnings,
+ "lines": _lines,
+ "success": True if not int(_errors)
+ else False}
+
+ E = {"id": _subtype,
+ "verbose": _subtype,
+ "msg": _msg,
+ "location": {"file": _file, "line": _line}
+ }
+ try:
+ out_data["report"][_type.lower()].append(E)
+ except KeyError:
+ out_data["report"][_type.lower()] = [E]
+
+ if out_f:
+ save_json(out_f, out_data)
+ else:
+ pprint(out_data)
+
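+# Illustrative checkpatch lines this parser expects (content hypothetical):
+#
+#   secure_fw/core/tfm_core.c:42: ERROR:SPACING: space required after ','
+#   total: 1 errors, 0 warnings, 120 lines checked
+#
+# captured as file/line, type/subtype and message, plus the overall totals.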
+
+def jenkins_mdt_collect(out_f):
+ """ Collects Jenkins enviroment information and stores
+ it in a key value list """
+
+ # Jenkins environment parameters are always valid
+ jenkins_env_keys = ["BUILD_ID",
+ "BUILD_URL",
+ "JOB_BASE_NAME",
+ "GERRIT_URL",
+ "GERRIT_PROJECT"]
+ # The following Gerrit parameters only exist when
+ # a job is triggered by a web hook
+ gerrit_trigger_keys = ["GERRIT_CHANGE_NUMBER",
+ "GERRIT_CHANGE_SUBJECT",
+ "GERRIT_CHANGE_ID",
+ "GERRIT_PATCHSET_REVISION",
+ "GERRIT_PATCHSET_NUMBER",
+ "GERRIT_REFSPEC",
+ "GERRIT_CHANGE_URL",
+ "GERRIT_BRANCH",
+ "GERRIT_CHANGE_OWNER_EMAIL",
+ "GERRIT_PATCHSET_UPLOADER_EMAIL"]
+
+    # Find as many of the variables as possible in the environment
+ el = set(os.environ).intersection(set(jenkins_env_keys +
+ gerrit_trigger_keys))
+ # Format it in key:value pairs
+ out_data = {n: os.environ[n] for n in el}
+ if out_f:
+ save_json(out_f, out_data)
+ else:
+ pprint(out_data)
+
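+# Illustrative output when run inside a Jenkins job (values hypothetical):
+#
+#   {"BUILD_ID": "42",
+#    "BUILD_URL": "https://jenkins.example.org/job/tf-m-build/42/",
+#    "JOB_BASE_NAME": "tf-m-build"}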
+
+def metadata_collect(user_args):
+ """ Logic for information collection during different stages of
+ the build """
+
+ if user_args.dependencies_checkout and user_args.content_paths:
+ dependencies_mdt_collect(user_args.content_paths,
+ user_args.out_f)
+ elif user_args.git_info:
+ git_info = get_local_git_info(os.path.abspath(user_args.git_info))
+
+ if user_args.out_f:
+ save_json(user_args.out_f, git_info)
+ else:
+ pprint(git_info)
+ elif user_args.cppcheck_files:
+ cppcheck_mdt_collect(user_args.cppcheck_files, user_args.out_f)
+ elif user_args.checkpatch_file:
+ checkpatch_mdt_collect(user_args.checkpatch_file, user_args.out_f)
+ elif user_args.jenkins_info:
+ jenkins_mdt_collect(user_args.out_f)
+ else:
+ print("Invalid Metadata collection arguments")
+ print(user_args)
+ sys.exit(1)
+
+
+def collate_report(key_file_list, output_f=None, stdout=True):
+ """ Join different types of json formatted reports into one """
+
+ out_data = {"_metadata_": {}, "report": {}}
+ for kf in key_file_list:
+ try:
+ key, fl = kf.split("=")
+ data = load_json(fl)
+            # If data is a standard report (metadata + report), parse it
+ if ("_metadata_" in data.keys() and "report" in data.keys()):
+ out_data["_metadata_"][key] = data["_metadata_"]
+ out_data["report"][key] = data["report"]
+ # Else treat it as a raw information passing dataset
+ else:
+ try:
+ out_data["info"][key] = data
+ except KeyError as E:
+ out_data["info"] = {key: data}
+ except Exception as E:
+ print("Exception parsing argument", kf, E)
+ continue
+    if output_f:
+        save_json(output_f, out_data)
+ elif stdout:
+ pprint(out_data)
+ return out_data
+
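+# Illustrative call (file names hypothetical): merge two check reports into
+# a single dataset keyed by report type:
+#
+#   collate_report(["cppcheck=cppcheck_report.json",
+#                   "checkpatch=checkpatch_report.json"], "summary.json")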
+
+def filter_report(key_value_list, input_f, output_f):
+ """ Generates a subset of the data contained in
+    input_f, by selecting only the values defined in key_value_list """
+
+ try:
+ rep_data = load_json(input_f)
+ except Exception as E:
+ print("Exception parsing ", input_f, E)
+ sys.exit(1)
+
+ out_data = {}
+ for kf in key_value_list:
+ try:
+ tag, value = kf.split("=")
+ # if multiple selection
+ if(",") in value:
+ out_data[tag] = {}
+ for v in value.split(","):
+ data = rep_data[tag][v]
+ out_data[tag][v] = data
+ else:
+ data = rep_data[tag][value]
+ out_data[tag] = {value: data}
+ except Exception as E:
+ print("Could not extract data-set for k: %s v: %s" % (tag, value))
+ print(E)
+ continue
+    if output_f:
+        save_json(output_f, out_data)
+ else:
+ pprint(out_data)
+
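+# Illustrative call (names hypothetical): keep only two mbedtls entries of
+# an input report:
+#
+#   filter_report(["mbedtls=2.7.9,2.16.2"],
+#                 "dependencies_report.json",
+#                 "filtered_report.json")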
+
+def parse_report(user_args):
+ """ Parse a report and attempt to determine if it is overall successful or
+ not. It will set the script's exit code accordingly """
+
+ # Parse Mode
+ in_rep = load_json(user_args.report)
+ report_eval = None
+
+    # Extract the required condition for evaluation to pass
+ pass_key, pass_val = split_keys(user_args.set_pass)
+
+ print("Evaluation will succeed if \"%s\" is \"%s\"" % (pass_key,
+ pass_val))
+ try:
+ report_eval = in_rep["_metadata_"][pass_key] == pass_val
+ print("Evaluating detected '%s' field in _metaddata_. " % pass_key)
+ except Exception as E:
+ pass
+
+ if report_eval is None:
+ if isinstance(in_rep, dict):
+ # If report contains an overall success field in metadata do not
+ # parse the items
+ in_rep = in_rep["report"]
+ ev_list = in_rep.values()
+ elif isinstance(in_rep, list):
+ ev_list = in_rep
+ else:
+ print("Invalid data type: %s" % type(in_rep))
+ return
+
+ if user_args.onepass:
+ try:
+ report_eval = in_rep[user_args.onepass][pass_key] == pass_val
+ except Exception as e:
+ report_eval = False
+
+        # If every single field needs to be successful, invert the check and
+        # look for those which are not
+ elif user_args.allpass:
+ try:
+ if list(filter(lambda x: x[pass_key] != pass_val, ev_list)):
+ pass
+ else:
+ report_eval = True
+ except Exception as e:
+ print(e)
+ report_eval = False
+ else:
+ print("Evaluation condition not set. Please use -a or -o. Launch"
+ "help (-h) for more information")
+
+ print("Evaluation %s" % ("passed" if report_eval else "failed"))
+ if user_args.eif:
+ print("Setting script exit status")
+ sys.exit(0 if report_eval else 1)
+
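+# Illustrative command line use (file name hypothetical): fail the job when
+# a collected report is not successful:
+#
+#   ./report_parser.py -r cppcheck_report.json -s "success = True" -e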
+
+def main(user_args):
+ """ Main logic """
+
+    # Metadata collect mode
+ if user_args.collect:
+ metadata_collect(user_args)
+ return
+ elif user_args.filter_report:
+ filter_report(user_args.filter_report,
+ user_args.report,
+ user_args.out_f)
+ elif user_args.collate_report:
+ collate_report(user_args.collate_report, user_args.out_f)
+ else:
+ parse_report(user_args)
+
+
+def get_cmd_args():
+ """ Parse command line arguments """
+
+ # Parse command line arguments to override config
+ parser = argparse.ArgumentParser(description="TFM Report Parser.")
+ parser.add_argument("-e", "--error_if_failed",
+ dest="eif",
+ action="store_true",
+ help="If set will change the script exit code")
+ parser.add_argument("-s", "--set-success-field",
+ dest="set_pass",
+ default="status = Success",
+ action="store",
+ help="Set the key which the script will use to"
+ "assert success/failure")
+ parser.add_argument("-a", "--all-fields-must-pass",
+ dest="allpass",
+ action="store_true",
+ help="When set and a list is provided, all entries"
+ "must be succefull for evaluation to pass")
+ parser.add_argument("-o", "--one-field-must-pass",
+ dest="onepass",
+ action="store",
+ help="Only the user defined field must pass")
+ parser.add_argument("-r", "--report",
+ dest="report",
+ action="store",
+ help="JSON file containing input report")
+ parser.add_argument("-c", "--collect",
+ dest="collect",
+ action="store_true",
+ help="When set, the parser will attempt to collect"
+ "information and produce a report")
+ parser.add_argument("-d", "--dependencies-checkout",
+ dest="dependencies_checkout",
+ action="store_true",
+ help="Collect information from a dependencies "
+ "checkout job")
+ parser.add_argument("-f", "--output-file",
+ dest="out_f",
+ action="store",
+ help="Output file to store captured information")
+ parser.add_argument('-p', '--content-paths',
+ dest="content_paths",
+ nargs='*',
+ help=("Pass a space separated list of paths in the"
+ "following format: -p mbedtls=/yourpath/"
+ "fpv=/another/path .Used in conjuction with -n"))
+ parser.add_argument("-g", "--git-info",
+ dest="git_info",
+ action="store",
+ help="Extract git information from given path. "
+ "Requires --colect directive. Optional parameter"
+ "--output-file ")
+ parser.add_argument("-x", "--cpp-check-xml",
+ dest="cppcheck_files",
+ nargs='*',
+ action="store",
+ help="Extract cppcheck static analysis information "
+ " output files, provided as a space separated "
+ "list. Requires --colect directive."
+ " Optional parameter --output-file ")
+ parser.add_argument("-z", "--checkpatch-parse-f",
+ dest="checkpatch_file",
+ action="store",
+ help="Extract checkpatch static analysis information "
+ " output file. Requires --colect directive."
+ " Optional parameter --output-file ")
+ parser.add_argument("-j", "--jenkins-info",
+ dest="jenkins_info",
+ action="store_true",
+ help="Extract jenkings and gerrit trigger enviroment "
+ "information fr. Requires --colect directive."
+ " Optional parameter --output-file ")
+ parser.add_argument("-l", "--collate-report",
+ dest="collate_report",
+ action="store",
+ nargs='*',
+ help="Pass a space separated list of key-value pairs"
+ "following format: -l report_key_0=report_file_0"
+ " report_key_1=report_file_1. Collate will "
+ "generate a joint dataset and print it to stdout."
+ "Optional parameter --output-file ")
+ parser.add_argument("-t", "--filter-report",
+ dest="filter_report",
+ action="store",
+ nargs='*',
+ help="Requires --report parameter for input file."
+ "Pass a space separated list of key-value pairs"
+ "following format: -l report_key_0=value_0"
+ " report_key_1=value_0. Filter will remote all"
+ "entries of the original report but the ones"
+ "mathing the key:value pairs defined and print it"
+ "to stdout.Optional parameter --output-file")
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ main(get_cmd_args())
diff --git a/run-checkpatch.sh b/run-checkpatch.sh
index 60c3be9..0bb78a4 100755
--- a/run-checkpatch.sh
+++ b/run-checkpatch.sh
@@ -24,7 +24,7 @@
##
#This is needed for Doxygen for now.
#!string SKIP_PATHS;
-SKIP_PATHS='./build-\*:./test/\*:./platform/\*:*/tz_\*'
+SKIP_PATHS='./build-\*:./test/\*:./platform/\*:*/tz_\*:./lib/ext/qcbor/\*:./platform/ext/\*:./bl2/ext/\*'
##@var TFM_DIRECTORY_NAME
##@brief Default path to tf-m source code.
@@ -201,13 +201,17 @@
#list of files. This is needed to avoid GIT_CMD to break the argument
#list length.
CARE_LIST=$(eval $FIND_CMD | grep "$(git diff $BASE_COMMIT --name-only)" -)
- GIT_CMD="git diff $BASE_COMMIT -- $CARE_LIST"
- echo "Checking commits: $(git log "$BASE_COMMIT"..HEAD --format=%h | tr $"\n" " ")"
+ if [ ! -z "$CARE_LIST" ]; then
+ # Only run checkpatch if there are files to check
+ GIT_CMD="git diff $BASE_COMMIT -- $CARE_LIST"
+ echo "$GIT_CMD"
+ echo "Checking commits: $(git log "$BASE_COMMIT"..HEAD --format=%h | tr $"\n" " ")"
- #Modify checkpatch parameters to give more details when working on
- #diff:s
- CHECKPATCH_CMD="$CHECKPATCH_CMD --showfile -"
+ #Modify checkpatch parameters to give more details when working on
+ #diff:s
+ CHECKPATCH_CMD="$CHECKPATCH_CMD --showfile -"
+ fi
if [ $VERBOSE -eq 1 ]; then
$GIT_CMD | $CHECKPATCH_CMD | tee -a "$OUTPUT_FILE_PATH"
diff --git a/run-cppcheck.sh b/run-cppcheck.sh
index cc0abd4..467e5e5 100755
--- a/run-cppcheck.sh
+++ b/run-cppcheck.sh
@@ -46,6 +46,9 @@
#The location from where the script executes
mypath=$(dirname $0)
+#The cmake-exported project file in json format
+cmake_commands=compile_commands.json
+
. "$mypath/util_cmake.sh"
@@ -53,31 +56,94 @@
library_file="$(fix_win_path $(get_full_path $mypath))/cppcheck/arm-cortex-m.cfg"
suppress_file="$(fix_win_path $(get_full_path $mypath))/cppcheck/tfm-suppress-list.txt"
+#Enable all additional checks by default
+additional_checklist="all"
+
#Run cmake to get the compile_commands.json file
echo
echo '******* Generating compile_commandas.json ***************'
echo
generate_project $(fix_win_path $(get_full_path ./)) "./" "cppcheck" "-DCMAKE_EXPORT_COMPILE_COMMANDS=1 -DTARGET_PLATFORM=AN521 -DCOMPILER=GNUARM"
+
#Enter the build directory
bdir=$(make_build_dir_name "./" "cppcheck")
pushd "$bdir" >/dev/null
+
+#The following snippet allows cppcheck to be run differentially against a
+#commit hash passed as the first argument $1. It does not affect the legacy
+#functionality of the script, checking the whole codebase, when called
+#without an argument
+if [[ ! -z "$1" ]]
+ then
+ echo "Enabled git-diff mode againist hash: $1"
+
+    # Do not execute the unusedFunction check when running in diff mode
+ additional_checklist="style,performance,portability,information,missingInclude"
+    # Grep will set exit status to 1 if a commit contains no C/C++ source or header files
+ set +e
+ filtered_cmd_f=compile_commands_filtered.json
+ # Get a list of files modified by the commits between the reference and HEAD
+ flist=$(git diff-tree --no-commit-id --name-only -r $1 | grep -E '\S*\.(c|cpp|cc|cxx|inc|h)$')
+ flist=$(echo $flist | xargs)
+ echo -e "[" > $filtered_cmd_f
+ IFS=$' ' read -ra git_flist <<< "${flist}"
+
+ for fl in "${git_flist[@]}"; do
+ echo "Looking for reference of file: $fl"
+
+        # Dry-run the grep to see if there is any output
+        JSON_CMD=$(grep -B 3 "\"file\": \".*$fl\"" $cmake_commands)
+
+ if [ -n "${JSON_CMD}" ]; then
+ command_matched=1
+            grep -B 3 "\"file\": \".*$fl\"" $cmake_commands >> $filtered_cmd_f
+ echo -e "}," >> $filtered_cmd_f
+ fi
+ done
+ set -e
+
+ # Only continue if files in the patch are included in the build commands
+ if [ -n "${command_matched}" ]; then
+ sed -i '$ d' $filtered_cmd_f
+ echo -e "}\n]" >> $filtered_cmd_f
+
+        cat $filtered_cmd_f > $cmake_commands
+ else
+ # Always generate an empty file for other stages of ci expecting one
+ echo "CppCheck: Ignoring files not contained in the build config"
+ echo "Files Ignored: $flist"
+ cat <<-EOF > chk-config.xml
+ <?xml version="1.0" encoding="UTF-8"?>
+ <results version="2">
+ <cppcheck version="$(cppcheck --version)"/>
+ <errors>
+ </errors>
+ </results>
+ EOF
+ cp chk-config.xml chk-src.xml
+ exit 0
+ fi
+fi
+
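+# Example of the differential mode (commit hash hypothetical):
+#   ./run-cppcheck.sh 1a2b3c4d
+# Without an argument the whole codebase is checked, as before.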
+
#Build the external projects to get all headers installed to plases from where
#tf-m code uses them
echo
echo '******* Install external projects to their final place ***************'
echo
-make -j mbedcrypto_lib_install mbedtls_mcuboot_lib_install
+make -j mbedtls_mcuboot_lib_install
#Now run cppcheck.
echo
echo '******* checking cppcheck configuration ***************'
echo
-cppcheck --xml -j 4 --check-config --enable=all --library="$library_file" --project=compile_commands.json --suppressions-list="$suppress_file" --inline-suppr 2>chk-config.xml
+
+cppcheck --xml --check-config --enable="$additional_checklist" --library="$library_file" --project=$cmake_commands --suppressions-list="$suppress_file" --inline-suppr 2>chk-config.xml
echo
echo '******* analyzing files with cppcheck ***************'
echo
-cppcheck --xml -j 4 --enable=all --library="$library_file" --project=compile_commands.json --suppressions-list="$suppress_file" --inline-suppr 2>chk-src.xml
+cppcheck --xml --enable="$additional_checklist" --library="$library_file" --project=$cmake_commands --suppressions-list="$suppress_file" --inline-suppr 2>chk-src.xml
popd
echo
diff --git a/tfm_ci_pylib/fastmodel_wrapper/__init__.py b/tfm_ci_pylib/fastmodel_wrapper/__init__.py
new file mode 100644
index 0000000..d59ebcc
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/__init__.py
@@ -0,0 +1,21 @@
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+__all__ = ["config_templates",
+ "fastmodel_config_map",
+ "fastmodel_wrapper",
+ "fastmodel_wrapper_config"]
+
+from .fastmodel_wrapper_config import config_variant, fpv_wrapper_config
+from .fastmodel_wrapper import FastmodelWrapper
+from .fastmodel_config_map import FastmodelConfigMap
+
+from .config_templates import template_default_config, \
+ template_regression_config, template_coreipc_config, \
+ template_coreipctfmlevel2_config
diff --git a/tfm_ci_pylib/fastmodel_wrapper/config_templates.py b/tfm_ci_pylib/fastmodel_wrapper/config_templates.py
new file mode 100644
index 0000000..6c9c636
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/config_templates.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+
+""" config_templatess.py:
+
+ """
+
+from __future__ import print_function
+from copy import deepcopy
+from .fastmodel_wrapper_config import fpv_wrapper_config
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+# =================== Template Classes ===================
+class template_cfg(fpv_wrapper_config):
+ """ Creates a skeleton template configuration that allows creation of
+ configuration variants which set the parameters of:
+    build_path, config, platform, compiler, as well as the missing test
+    params: test_rex, test_cases, test_end_string """
+
+ _name = fpv_wrapper_config._name + "_%(platform)s_%(compiler)s_" + \
+ "%(config)s_%(build_type)s_%(bootloader)s"
+    # variant dictionary allows individual and targeted parameter modification
+ _vdict = {
+ "build_path": "%(build_path)s",
+ "variant_name_tpl": "%(variant_name_tpl)s",
+ "app_bin_path": "%(app_bin_path)s",
+ "app_bin": "%(app_bin)s",
+ "data_bin_path": "%(data_bin_path)s",
+ "data_bin": "%(data_bin)s",
+ "data_bin_offset": "%(data_bin_offset)s",
+ "config": "%(config)s",
+ "platform": "%(platform)s",
+ "compiler": "%(compiler)s",
+ "build_type": "%(build_type)s",
+ "bootloader": "%(bootloader)s"
+ }
+
+ _cfg = deepcopy(fpv_wrapper_config._cfg)
+ _cfg["directory"] = "FVP_MPS2"
+ _cfg["terminal_log"] = "terminal_%(variant_name_tpl)s.log"
+ _cfg["bin"] = "FVP_MPS2_AEMv8M"
+ _cfg["error_on_failed"] = False
+ _cfg["application"] = (
+ "cpu0=%(build_path)s/%(variant_name_tpl)s/" +
+ "%(app_bin_path)s/%(app_bin)s")
+ _cfg["data"] = (
+ "cpu0=%(build_path)s/%(variant_name_tpl)s/%(data_bin_path)s/" +
+ "%(data_bin)s@%(data_bin_offset)s")
+ _cfg["simlimit"] = "600"
+ _cfg["parameters"] = [
+ "fvp_mps2.platform_type=2",
+ "cpu0.baseline=0",
+ "cpu0.INITVTOR_S=0x10000000",
+ "cpu0.semihosting-enable=0",
+ "fvp_mps2.DISABLE_GATING=0",
+ "fvp_mps2.telnetterminal0.start_telnet=0",
+ "fvp_mps2.telnetterminal1.start_telnet=0",
+ "fvp_mps2.telnetterminal2.start_telnet=0",
+ "fvp_mps2.telnetterminal0.quiet=1",
+ "fvp_mps2.telnetterminal1.quiet=1",
+ "fvp_mps2.telnetterminal2.quiet=1",
+ "fvp_mps2.UART0.out_file=$TERM_FILE",
+ "fvp_mps2.UART0.unbuffered_output=1",
+ "fvp_mps2.UART0.shutdown_on_eot=1",
+ "fvp_mps2.mps2_visualisation.disable-visualisation=1"]
+
+
+class template_default_config(template_cfg):
+ """ Will automatically populate the required information for tfm
+ Default configuration testing. User still needs to set the
+ buildpath, platform, compiler variants """
+
+ _cfg = deepcopy(template_cfg._cfg)
+
+ _vdict = deepcopy(template_cfg._vdict)
+
+ # Set defaults across all variants
+ _vdict["build_path"] = "build-ci-all"
+ _vdict["app_bin_path"] = "install/outputs/fvp"
+ _vdict["data_bin_path"] = "install/outputs/fvp"
+ _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+ "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+ _vdict["config"] = "ConfigDefault"
+ _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # The system supports two types of matching: a test_case_id and result
+    # match group, or only a test_case_id
+ _cfg["test_rex"] = (r'\x1b\[1;34m\[Sec Thread\] '
+ r'(?P<test_case_id>Secure image initializing!)\x1b\[0m'
+ )
+
+    # The test_case_id capture group should match test_cases entries
+ _cfg["test_cases"] = [
+ 'Secure image initializing!',
+ ]
+    # Testing will stop when the end string is matched
+ _cfg["test_end_string"] = "Secure image initializing"
+ _cfg["simlimit"] = "120"
+
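+# Illustrative use (class name hypothetical): a concrete variant is created
+# from a template with the config_variant decorator (defined in
+# fastmodel_wrapper_config), which fills in the remaining placeholders:
+#
+#   @config_variant(platform="AN521", compiler="GNUARM",
+#                   build_type="Debug", bootloader="BL2")
+#   class fvp_an521_gnuarm_debug_bl2(template_default_config):
+#       pass
+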
+class template_regression_config(template_cfg):
+ """ Will automatically populate the required information for tfm
+ Regression configuration testing. User still needs to set the
+ buildpath, platform, compiler variants """
+
+ _cfg = deepcopy(template_cfg._cfg)
+ _vdict = deepcopy(template_cfg._vdict)
+
+ # Set defaults across all variants
+ _vdict["build_path"] = "build-ci-all"
+ _vdict["app_bin_path"] = "install/outputs/fvp"
+ _vdict["data_bin_path"] = "install/outputs/fvp"
+ _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+ "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+ _vdict["config"] = "ConfigRegression"
+ _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+ # Populate the test cases
+ _cfg["test_rex"] = (r"[\x1b]\[37mTest suite '(?P<test_case_id>[^\n]+)'"
+ r" has [\x1b]\[32m (?P<result>PASSED|FAILED)")
+ _cfg["test_cases"] = [
+ 'PSA protected storage S interface tests (TFM_SST_TEST_2XXX)',
+ 'PSA protected storage NS interface tests (TFM_SST_TEST_1XXX)',
+ 'SST reliability tests (TFM_SST_TEST_3XXX)',
+ 'Core non-secure positive tests (TFM_CORE_TEST_1XXX)',
+ 'AuditLog non-secure interface test (TFM_AUDIT_TEST_1XXX)',
+ 'Crypto non-secure interface test (TFM_CRYPTO_TEST_6XXX)',
+ 'Initial Attestation Service '
+ 'non-secure interface tests(TFM_ATTEST_TEST_2XXX)',
+ 'Invert non-secure interface tests (TFM_INVERT_TEST_1XXX)',
+ 'SST rollback protection tests (TFM_SST_TEST_4XXX)',
+ 'Audit Logging secure interface test (TFM_AUDIT_TEST_1XXX)',
+ 'Crypto secure interface tests (TFM_CRYPTO_TEST_5XXX)',
+ 'Initial Attestation Service secure '
+ 'interface tests(TFM_ATTEST_TEST_1XXX)',
+ 'Invert secure interface tests (TFM_INVERT_TEST_1XXX)',
+ ]
+ _cfg["test_end_string"] = "End of Non-secure test suites"
+
+ _cfg["simlimit"] = "1200"
+
+
+class template_coreipc_config(template_cfg):
+ """ Will automatically populate the required information for tfm
+ coreipc configuration testing. User still needs to set the
+ buildpath, platform, compiler variants """
+
+ _cfg = deepcopy(template_cfg._cfg)
+
+ _vdict = deepcopy(template_cfg._vdict)
+
+ # Set defaults across all variants
+ _vdict["build_path"] = "build-ci-all"
+
+ _vdict["app_bin_path"] = "install/outputs/fvp"
+ _vdict["data_bin_path"] = "install/outputs/fvp"
+
+ _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+ "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+ _vdict["config"] = "ConfigCoreIPC"
+ _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # The system supports two types of matching: a test_case_id and result
+    # match group, or only a test_case_id
+ _cfg["test_rex"] = (r'\x1b\[1;34m\[Sec Thread\] '
+ r'(?P<test_case_id>Secure image initializing!)\x1b\[0m'
+ )
+
+    # The test_case_id capture group should match test_cases entries
+ _cfg["test_cases"] = [
+ 'Secure image initializing!',
+ ]
+    # Testing will stop when the end string is matched
+ _cfg["test_end_string"] = "Secure image initializing"
+ _cfg["simlimit"] = "1200"
+
+class template_coreipctfmlevel2_config(template_cfg):
+ """ Will automatically populate the required information for tfm
+ coreipc tfmlevel2 configuration testing. User still needs to set the
+ buildpath, platform, compiler variants """
+
+ _cfg = deepcopy(template_cfg._cfg)
+
+ _vdict = deepcopy(template_cfg._vdict)
+
+ # Set defaults across all variants
+ _vdict["build_path"] = "build-ci-all"
+
+ _vdict["app_bin_path"] = "install/outputs/fvp"
+ _vdict["data_bin_path"] = "install/outputs/fvp"
+
+ _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+ "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+ _vdict["config"] = "ConfigCoreIPCTfmLevel2"
+ _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # The system supports two types of matching: a test_case_id and result
+    # match group, or only a test_case_id
+ _cfg["test_rex"] = (r'\x1b\[1;34m\[Sec Thread\] '
+ r'(?P<test_case_id>Secure image initializing!)\x1b\[0m'
+ )
+
+    # The test_case_id capture group should match test_cases entries
+ _cfg["test_cases"] = [
+ 'Secure image initializing!',
+ ]
+    # Testing will stop when the end string is matched
+ _cfg["test_end_string"] = "Secure image initializing"
+ _cfg["simlimit"] = "1200"
diff --git a/tfm_ci_pylib/fastmodel_wrapper/fastmodel_config_map.py b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_config_map.py
new file mode 100644
index 0000000..1a58441
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_config_map.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python3
+
+""" fastmodel_config_map.py:
+
+    Uses the Python class inheritance model to generate modular and easily
+    scalable configuration models for the run_fpv module. Configuration data
+    is also combined with helper methods. If the file is run as a standalone
+    file, it can save json configuration files to disk if requested by the
+    --export directive """
+
+from __future__ import print_function
+from copy import deepcopy
+from pprint import pprint
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+class FastmodelConfigMap(object):
+
+    def __init__(self, environment, platform):
+        self._platforms = [platform]
+        self._cfg_map = self.global_import(environment)
+        self._invalid = []
+
+ def add_invalid(self, invalid_tuple):
+ self._invalid.append(invalid_tuple)
+
+ def get_invalid(self):
+ return deepcopy(self._invalid)
+
+    def global_import(self, environment, classname="TfmFastModelConfig"):
+        """ Import modules with the specified classname from the environment
+        provided by the caller """
+
+ # Select the imported modules with a __name__ attribute
+        ol = {nme: cfg for nme, cfg in environment.items()
+ if hasattr(cfg, '__name__')}
+
+ # Select those who match the classname
+ fcfg = {nme: cfg_obj for nme, cfg_obj
+                in ol.items() if cfg_obj.__name__ == classname}
+
+ return {self._platforms[0]: fcfg}
+
+ def __add__(self, obj_b):
+ """ Override addition operator """
+
+        # Create a new object from the left-hand operand for return
+ ret_obj = deepcopy(self)
+
+ # Get references to new class members
+ map_a = ret_obj._cfg_map
+ platforms_a = ret_obj._platforms
+ map_b = obj_b.get_object_map()
+ for platform, config in map_b.items():
+
+ if platform in map_a.keys():
+ for cfg_name, cfg_object in config.items():
+ if cfg_name in map_a[platform].keys():
+ print("Matching entrty name %s" % (cfg_name))
+ print("Left operant entry: %s "
+ "will be replaced by: %s" %
+ (map_a[platform][cfg_name], cfg_object))
+ map_a[platform][cfg_name] = cfg_object
+ else:
+ map_a[platform] = deepcopy(config)
+ platforms_a.append(platform)
+
+ return ret_obj
+
+ def _cmerge(self):
+ """ Join all the platform configs """
+
+ ret = {}
+ for entry in self._cfg_map.values():
+ for name, cfg in entry.items():
+ ret[name] = cfg
+ return ret
+
+ def get_object_map(self):
+ """ Returns the config map as objects """
+
+ return deepcopy(self._cfg_map)
+
+ def get_config_map(self):
+ """ Return a copy of the config map with the config objects rendered
+ as dictionaries """
+
+ ret_dict = deepcopy(self._cfg_map)
+ for platform, config in self._cfg_map.items():
+ for name, cfg_object in config.items():
+ ret_dict[platform][name] = cfg_object.get_config()
+ return ret_dict
+
+ def list(self):
+ """ Print a quick list of the contained platforms and
+ configuration names """
+
+ return list(self._cmerge().keys())
+
+ def print_list(self):
+ """ Print a quick list of the contained platforms and
+ configuration names """
+
+ for platform, config in self._cfg_map.items():
+ print("=========== Platform: %s ===========" % platform)
+ for name, cfg_object in config.items():
+ print(name)
+
+ def print(self):
+ """ Print the contents of a human readable config map """
+
+ pprint(self.get_config_map())
+
+ def get_config_object(self, config_name, platform=None):
+ try:
+ cfg_dict = self._cfg_map[platform]
+ except Exception as e:
+ cfg_dict = self._cmerge()
+
+ return cfg_dict[config_name]
+
+ def get_config(self, config_name, platform=None):
+
+ return self.get_config_object(config_name, platform).get_config()
+
+ def patch_config(self, cfg_name, key, new_data, platform=None):
+ """ Modify a configuration entry, and re-render the class """
+
+ cfg_object = self.get_config_object(cfg_name, platform)
+
+        # Do not rebuild if the data is unchanged
+ if cfg_object.get_variant_metadata()[key] == new_data:
+ return
+ v_meta = cfg_object.get_variant_metadata()
+ v_meta[key] = new_data
+ cfg_object.set_variant_metadata(v_meta).rebuild()
+
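+    # Illustrative use (names hypothetical): point a stored configuration at
+    # a different artifact folder and re-render the class:
+    #
+    #   cfg_map.patch_config("run_fpv_an521_gnuarm_configdefault_debug_bl2",
+    #                        "build_path", "new-build-dir")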
+
+def fvp_config_object_change_path(cfg_object, new_path):
+ """ Change the common artifact storage path and update its
+ configuration """
diff --git a/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper.py b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper.py
new file mode 100755
index 0000000..7566c2e
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper.py
@@ -0,0 +1,553 @@
+#!/usr/bin/env python3
+
+""" fastmodel_wrapper.py:
+
+    Wraps around Fast Models which will execute in headless mode,
+    producing serial output to a defined log file. It will spawn two processes
+ and one thread to monitor the output of the simulation and end it when a
+ user defined condition is matched. It will perform a set of tests and will
+ change the script exit code based on the output of the test """
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+import os
+import re
+import sys
+import argparse
+from time import sleep
+from pprint import pprint
+from copy import deepcopy
+from threading import Thread
+from queue import Queue, Empty
+from subprocess import Popen, PIPE, STDOUT
+
+try:
+ from tfm_ci_pylib.utils import find_missing_files, \
+ detect_python3, test, check_pid_status, save_json, save_dict_json, \
+ load_json
+except ImportError:
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+ from tfm_ci_pylib.utils import find_missing_files, \
+ detect_python3, test, check_pid_status, save_json, save_dict_json, \
+ load_json
+
+
+class FastmodelWrapper(object):
+ """ Controlling Class that wraps around an ARM Fastmodel and controls
+ execution, adding regex flow controls, and headless testing """
+
+ def __init__(self,
+ fvp_cfg=None,
+ work_dir="./",
+ fvp_dir=None,
+ fvp_binary=None,
+ fvp_app=None,
+ fvp_boot=None,
+ terminal_file=None,
+ fvp_time_out=None,
+ fvp_test_error=None):
+
+ # Required by other methods, always set working directory first
+ self.work_dir = os.path.abspath(work_dir)
+
+ # Load the configuration from object or file
+ self.config, self.name = self.load_config(fvp_cfg)
+
+ self.show_config()
+
+ # Print a header
+ ln = int((62 - len(self.name) + 1) / 2)
+ print("\n%s Running Test: %s %s\n" % ("#" * ln, self.name, "#" * ln))
+
+        # Consume the configuration parameters not related to the FVP
+ # Extract test cases
+ self.test_list = self.config.pop("test_cases")
+ self.test_end_string = self.config.pop("test_end_string")
+ self.test_rex = self.config.pop("test_rex")
+
+        # Command line argument overrides
+ # When those arguments are provided they override config entries
+ f_dir = self.config.pop("directory")
+ if fvp_dir:
+ self.fvp_dir = os.path.abspath(fvp_dir)
+ else:
+ self.fvp_dir = os.path.abspath(f_dir)
+
+ ef = self.config.pop("error_on_failed")
+ if fvp_test_error:
+ self.fvp_test_error = fvp_test_error
+ else:
+ self.fvp_test_error = ef
+
+ tf = self.config.pop("terminal_log")
+ if terminal_file:
+ self.term_file = os.path.abspath(terminal_file)
+ else:
+ tf = os.path.join(self.work_dir, tf)
+ self.term_file = os.path.abspath(tf)
+
+ # Override config entries directly
+ if fvp_binary:
+ self.config["bin"] = fvp_binary
+
+ if fvp_boot:
+ if re.match(r'[\S]+.axf$', fvp_boot):
+ self.config["application"] = "cpu0=" +\
+ os.path.abspath(fvp_boot)
+ else:
+ print("Invalid bootloader %s. Expecting .axf file" % fvp_app)
+ sys.exit(1)
+
+ # Ensure that the firmware is copied at the appropriate memory region
+        # perfect match regex for future ref: r'^(?:cpu=)[\S]+.bin@0x10080000$'
+ # TODO remove that when other platforms are added
+ if fvp_app:
+ if re.match(r'[\S]+.bin$', fvp_app):
+ self.config["data"] = "cpu0=" +\
+ os.path.abspath(fvp_app) +\
+ "@0x10080000"
+ else:
+ print("Invalid firmware %s. Expecting .bin file" % fvp_app)
+ sys.exit(1)
+
+ if fvp_time_out:
+ self.fvp_time_out = fvp_time_out
+ self.config["simlimit"] = fvp_time_out
+
+ self.monitor_q = Queue()
+ self.stop_all = False
+ self.pids = []
+ self.fvp_test_summary = False
+
+        # Asserted only after a complete test run, including end string matching
+ self.test_complete = False
+
+ self.test_report = None
+
+ # Change to working directory
+ os.chdir(self.work_dir)
+ print("Switching to working directory: %s" % self.work_dir)
+        # Clear the file if it has been created before
+ with open(self.term_file, "w") as F:
+ F.write("")
+
+ def show_config(self):
+ """ print the configuration to console """
+
+ print("\n%s config:\n" % self.name)
+ pprint(self.config)
+
+ def load_config(self, config):
+ """ Load the configuration from a json file or a memory map"""
+
+ try:
+            # If config is a dictionary object use it as is
+ if isinstance(config, dict):
+ ret_config = config
+ elif isinstance(config, str):
+ # if the file provided is not detected attempt to look for it
+ # in working directory
+ if not os.path.isfile(config):
+ # remove path from file
+ cfg_file_2 = os.path.split(config)[-1]
+ # look in the current working directory
+ cfg_file_2 = os.path.join(self.work_dir, cfg_file_2)
+ if not os.path.isfile(cfg_file_2):
+ m = "Could not find cfg in %s or %s " % (config,
+ cfg_file_2)
+ raise Exception(m)
+                # If the file exists in the working directory
+ else:
+ config = cfg_file_2
+ # Attempt to load the configuration from File
+ ret_config = load_json(config)
+ else:
+ raise Exception("Need to provide a valid config name or file."
+ "Please use --config/--config-file parameter.")
+
+ except Exception as e:
+ print("Error! Could not load config. Quitting")
+ sys.exit(1)
+
+ # Generate Test name (Used in test report) from terminal file.
+ tname = ret_config["terminal_log"].replace("terminal_", "")\
+ .split(".")[0].lower()
+
+ return deepcopy(ret_config), tname
+
+ def save_config(self, config_file="fvp_tfm_config.json"):
+ """ Safe current configuration to a json file """
+
+ # Add stripped information to config
+ exp_cfg = deepcopy(self.config)
+
+ exp_cfg["terminal_log"] = self.term_file
+ exp_cfg["error_on_failed"] = self.fvp_test_error
+ exp_cfg["directory"] = self.fvp_dir
+ exp_cfg["test_cases"] = self.test_list
+ exp_cfg["test_end_string"] = self.test_end_string
+ exp_cfg["test_rex"] = self.test_rex
+
+ cfg_f = os.path.join(self.work_dir, config_file)
+        save_dict_json(cfg_f, exp_cfg, list(exp_cfg.keys()))
+ print("Configuration %s exported." % cfg_f)
+
+ def compile_cmd(self):
+ """ Compile all the FPV realted information into a command that can
+ be executed manually """
+
+ cmd = ""
+ for name, value in self.config.items():
+            # Place the executable at the beginning of the command line
+ if name == "bin":
+ cmd = value + cmd
+ elif name == "parameters":
+ cmd += " " + " ".join(["--parameter %s" % p for p in value])
+ # Allows setting a second binary file as data field
+ elif name == "application" and ".bin@0x0" in value:
+ cmd += " --data %s" % value
+ else:
+ cmd += " --%s %s" % (name, value)
+
+ # Add the path to the command
+ cmd = os.path.join(self.fvp_dir, cmd)
+
+ # Add the log file to the command (optional)
+ cmd = cmd.replace("$TERM_FILE", self.term_file)
+ return cmd
+
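+    # Illustrative result (paths hypothetical): for a default BL2 variant the
+    # compiled command resembles:
+    #
+    #   FVP_MPS2/FVP_MPS2_AEMv8M \
+    #       --application cpu0=build-ci-all/<variant>/install/outputs/fvp/mcuboot.axf \
+    #       --data cpu0=build-ci-all/<variant>/install/outputs/fvp/tfm_s_ns_signed.bin@0x10080000 \
+    #       --simlimit 600 --parameter fvp_mps2.platform_type=2 ...
+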
+ def show_cmd(self):
+ """ print the FPV command to console """
+
+ print(self.compile_cmd())
+
+ def run_fpv(self):
+ """ Run the Fast Model test in a different proccess and return
+ the pid for housekeeping puproses """
+
+ def fpv_stdout_parser(dstream, queue):
+ """ THREAD: Read STDOUT/STDERR and stop if proccess is done """
+
+ for line in iter(dstream.readline, b''):
+ if self.stop_all:
+ break
+ else:
+ # Python2 ignores byte literals, P3 requires parsing
+ if detect_python3():
+ line = line.decode("utf-8")
+ if "Info: /OSCI/SystemC: Simulation stopped by user" in line:
+ print("/OSCI/SystemC: Simulation stopped")
+ self.stop()
+ break
+
+ # Convert to list
+ cmd = self.compile_cmd().split(" ")
+
+        # Run it as a subprocess
+ self.fvp_proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=False)
+ self._fvp_thread = Thread(target=fpv_stdout_parser,
+ args=(self.fvp_proc.stdout,
+ self.monitor_q))
+ self._fvp_thread.daemon = True
+ self._fvp_thread.start()
+ return self.fvp_proc.pid
+
+ def run_monitor(self):
+ """ Run a parallel threaded proccess that monitors the output of
+ the FPV and stops it when the a user specified string is found.
+ It returns the pid of the proccess for housekeeping """
+
+ def monitor_producer(dstream, queue):
+ """ THREAD: Read STDOUT and push data into a queue """
+
+ for line in iter(dstream.readline, b''):
+ if self.stop_all:
+ break
+ else:
+ # Python2 ignores byte literals, P3 requires parsing
+ if detect_python3():
+ line = line.decode("utf-8")
+
+ queue.put(line)
+
+ # If the text end string is found terminate
+ if self.test_end_string in str(line):
+
+ queue.put("Found End String \"%s\"" % self.test_end_string)
+ self.test_complete = True
+ self.stop()
+ break
+                # If the FVP stops by itself (i.e. simlimit reached) terminate
+ if "SystemC: Simulation stopped by user" in str(line):
+
+ queue.put("Simulation Ended \"%s\"" % self.test_end_string)
+ self.stop()
+ break
+
+ dstream.close()
+ return
+
+        # Run the tail as a separate process
+ cmd = ["tail", "-f", self.term_file]
+ self.monitor_proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=False)
+
+ self._fvp_mon_thread = Thread(target=monitor_producer,
+ args=(self.monitor_proc.stdout,
+ self.monitor_q))
+ self._fvp_mon_thread.daemon = True
+ self._fvp_mon_thread.start()
+ return self.monitor_proc.pid
+
+ def monitor_consumer(self):
+ """ Read the ouptut of the monitor thread and print the queue entries
+ one entry at the time (One line per call) """
+ try:
+ line = self.monitor_q.get_nowait()
+ except Empty:
+ pass
+ else:
+ print(line.rstrip())
+
+ def has_stopped(self):
+ """Retrun status of stop flag. True indicated stopped state """
+
+ return self.stop_all
+
+ def start(self):
+ """ Start the FPV and the montor procccesses and keep
+ track of their pids"""
+
+        # Do not spawn the FVP unless all required binaries are in place
+ bin_list = [os.path.join(self.fvp_dir, self.config["bin"]),
+ self.config["application"].replace("cpu0=", "")
+ .replace("@0x0", ""),
+ self.config["data"].replace("@0x10080000", "")
+ .replace("@0x00100000", "")
+ .replace("cpu0=", "")]
+
+ if find_missing_files(bin_list):
+ print("Could not find all binaries from %s" % ", ".join(bin_list))
+ print("Missing Files:", ", ".join(find_missing_files(bin_list)))
+ sys.exit(1)
+
+ self.pids.append(self.run_fpv())
+ self.pids.append(self.run_monitor())
+ print("Spawned Proccesses with PID %s" % repr(self.pids)[1:-1])
+ return self
+
+ def stop(self):
+ """ Stop all threads, proccesses and make sure there are no leaks """
+
+ self.stop_all = True
+
+        # Send the graceful shutdown signal
+ self.monitor_proc.terminate()
+ self.fvp_proc.terminate()
+ sleep(1)
+ # List the Zombies
+ # TODO remove debug output
+ for pid in sorted(self.pids):
+ if check_pid_status(pid, ["zombie", ]):
+ pass
+ # print("Warning. Defunc proccess %s" % pid)
+
+ def test(self):
+ """ Parse the output terminal file and evaluate status of tests """
+
+ # read the output file
+ with open(self.term_file, "r") as F:
+ terminal_log = F.read()
+
+ pass_text = "PASSED"
+ # create a filtering regex
+ rex = re.compile(self.test_rex)
+
+ # Extract tests status as a tuple list
+ tests = rex.findall(terminal_log)
+
+ try:
+ if isinstance(tests, list):
+ if len(tests):
+ # when test regex is in format [(test_name, RESULT),...]
+ if isinstance(tests[0], tuple):
+ # Convert result into a dictionary
+ tests = dict(zip(*list(zip(*tests))))
+ # when regex is in format [(test_name, test_name 2),...]
+ # we just need to verify they exist
+ elif isinstance(tests[0], str):
+ pass_text = "PRESENT"
+ tests = dict(zip(tests,
+ [pass_text for n in range(len(tests))]))
+ else:
+ raise Exception("Incompatible Test Format")
+ else:
+ raise Exception("Incompatible Test Format")
+ else:
+ raise Exception("Incompatible Test Format")
+ except Exception:
+
+ if not self.test_complete:
+ print("Warning! Test did not complete.")
+ else:
+ print("Error", "Invalid tests format: %s type: %s" %
+ (tests, type(tests)))
+ # Pass an empty output to test. Do not exit prematurely
+ tests = {}
+
+ # Run the test and store the report
+ self.test_report = test(self.test_list,
+ tests,
+ pass_text=pass_text,
+ test_name=self.name,
+ error_on_failed=self.fvp_test_error,
+ summary=self.fvp_test_summary)
+ return self
+
+ def get_report(self):
+ """ Return the test report object to caller """
+
+ if not self.test_report:
+ raise Exception("Can not create report from incomplete run cycle!")
+ return self.test_report
+
+ def save_report(self, rep_f=None):
+ """ Export report into a file, set by test name but can be overidden by
+ rep_file"""
+
+ if not self.stop_all or not self.test_report:
+ print("Can not create report from incomplete run cycle!")
+ return
+
+ if not rep_f:
+ rep_f = os.path.join(self.work_dir, "report_%s.json" % self.name)
+ rep_f = os.path.abspath(rep_f)
+ save_json(rep_f, self.test_report)
+ print("Exported test report: %s" % rep_f)
+ return self
+
+ def block_wait(self):
+ """ Block execution flow and wait for the monitor to complete """
+ try:
+ while True:
+ for pid in sorted(self.pids):
+
+ if not check_pid_status(pid, ["running",
+ "sleeping",
+ "disk"]):
+ print("Child proccess of pid: %s has died, exitting!" %
+ pid)
+ self.stop()
+ if self.has_stopped():
+ break
+ else:
+ self.monitor_consumer()
+
+ except KeyboardInterrupt:
+ print("User initiated interrupt")
+ self.stop()
+ # Allows method to be chainloaded
+ return self
+
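+# The wrapper methods return self, so a full headless run can be chained
+# (sketch; config file name hypothetical):
+#
+#   FastmodelWrapper(fvp_cfg="fvp_tfm_config.json").start() \
+#       .block_wait().test().save_report()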
+
+def get_cmd_args():
+ """ Parse command line arguments """
+
+ # Parse command line arguments to override config
+ parser = argparse.ArgumentParser(description="TFM Fastmodel wrapper.")
+ parser.add_argument("--bin",
+ dest="fvp_bin",
+ action="store",
+ help="Fast Model platform binary file")
+ parser.add_argument("--firmware",
+ dest="fvp_firm",
+ action="store",
+ help="Firmware application file to run")
+ parser.add_argument("--boot",
+ dest="fvp_boot",
+ action="store",
+ help="Fast Model bootloader file")
+ parser.add_argument("--fpv-path",
+ dest="fvp_dir",
+ action="store",
+ help="Directory path containing the Fast Models")
+ parser.add_argument("--work-path",
+ dest="work_dir", action="store",
+ default="./",
+ help="Working directory (Where logs are stored)")
+ parser.add_argument("--time-limit",
+ dest="time", action="store",
+ help="Time in seconds to run the simulation")
+ parser.add_argument("--log-file",
+ dest="termf",
+ action="store",
+ help="Set terminal log file name")
+ parser.add_argument("--error",
+ dest="test_err",
+ action="store",
+ help="raise sys.error = 1 if test failed")
+ parser.add_argument("--config-file",
+ dest="config_file",
+ action="store",
+ help="Path of configuration file")
+ parser.add_argument("--print-config",
+ dest="p_config",
+ action="store_true",
+ help="Print the configuration to console")
+ parser.add_argument("--print-command",
+ dest="p_command",
+ action="store_true",
+ help="Print the FPV launch command to console")
+ return parser.parse_args()
+
+
+def main(user_args):
+ """ Main logic """
+
+ # Create FPV handler
+ F = FastmodelWrapper(fvp_cfg=user_args.config_file,
+ work_dir=user_args.work_dir,
+ fvp_dir=user_args.fvp_dir,
+ fvp_binary=user_args.fvp_bin,
+ fvp_boot=user_args.fvp_boot,
+ fvp_app=user_args.fvp_firm,
+ terminal_file=user_args.termf,
+ fvp_time_out=user_args.time,
+ fvp_test_error=user_args.test_err)
+
+ if user_args.p_config:
+ F.show_config()
+ sys.exit(0)
+
+ if user_args.p_command:
+ F.show_cmd()
+ sys.exit(0)
+
+ # Start the wrapper
+ F.start()
+
+ # Wait for the wrapper to complete
+ F.block_wait()
+
+ print("Shutting Down")
+ # Test the output of the system only after a full execution
+ if F.test_complete:
+ F.test()
+
+
+if __name__ == "__main__":
+ main(get_cmd_args())
diff --git a/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper_config.py b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper_config.py
new file mode 100644
index 0000000..0c2f60a
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper_config.py
@@ -0,0 +1,267 @@
+#!/usr/bin/env python3
+
+""" fastmodel_wrapper_config.py:
+
+    Uses the Python class inheritance model to generate modular and easily
+    scalable configuration models for the run_fpv module. Configuration data
+    is also combined with helper methods. If the file is run as a standalone
+    file, it can save json configuration files to disk if requested by the
+    --export directive """
+
+from __future__ import print_function
+from collections import OrderedDict
+from copy import deepcopy
+from pprint import pprint
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+try:
+ from tfm_ci_pylib.utils import save_dict_json
+except ImportError:
+ import os
+ import sys
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(os.path.join(dir_path, "../"))
+ from tfm_ci_pylib.utils import save_dict_json
+
+
+# Used in fixed sorting of configuration before generating a json file
+# WARNING: modifying this list will fundamentally change behavior
+config_sort_order = [
+ "directory",
+ "terminal_log",
+ "bin",
+ "error_on_failed",
+ "test_rex", "test_cases",
+ "test_end_string",
+ "application",
+ "data",
+ "simlimit",
+ "parameters"
+]
+
+
+class fpv_wrapper_config(object):
+ """ Controlling Class that wraps around an ARM Fastmodel and controls
+ execution, adding regex flow controls, and headless testing """
+
+ # Ensure the dictionary entries are sorted
+ _cfg = OrderedDict.fromkeys(config_sort_order)
+ _name = "run_fpv"
+
+ def __init__(self,
+ fvp_dir,
+ terminal_file,
+ fvp_binary,
+ eof,
+ test_rex,
+ test_cases,
+ test_end_string,
+ fvp_app,
+ fvp_boot,
+ fvp_sim_limit,
+ params):
+
+ self._cfg["directory"] = fvp_dir
+ self._cfg["terminal_log"] = terminal_file
+ self._cfg["bin"] = fvp_binary
+ self._cfg["error_on_failed"] = eof
+ self._cfg["test_rex"] = test_rex
+ self._cfg["test_cases"] = test_cases
+ self._cfg["test_end_string"] = test_end_string
+ self._cfg["application"] = fvp_app
+ self._cfg["data"] = fvp_boot
+ self._cfg["simlimit"] = fvp_sim_limit
+ self._cfg["parameters"] = params
+
+ @classmethod
+ def get_config(self):
+ """ Return a copy of the fastmodel configuration dictionary """
+ return dict(deepcopy(self._cfg))
+
+ @classmethod
+ def get_variant_metadata(self):
+ """ Return a copy of the class generator variant dictionary """
+ return deepcopy(self._vdict)
+
+ @classmethod
+ def set_variant_metadata(self, vdict):
+ """ Replace the metadata dictionary with user provided one """
+
+ self._vdict = deepcopy(vdict)
+
+ return self
+
+ @classmethod
+    def query_variant_metadata(self, key, value):
+ """ Verify that metadata dictionary contains value for key entry """
+
+ return self._vdict[key] == value
+
+ @classmethod
+ def rebuild(self):
+ """ Recreate the configuration of a class after metadata has been
+ modified """
+
+ # Reset the configuration entries to the stock ones
+ self._cfg = deepcopy(self._tpl_cfg)
+
+ # recreate a temporary class with proper configuration
+ @config_variant(**self._vdict)
+ class tmp_class(self):
+ pass
+
+        # Copy over the new configuration from the temporary class
+ self._cfg = deepcopy(tmp_class._cfg)
+
+ @classmethod
+ def print(self):
+ """ Print the configuration dictionary in a human readable format """
+ pprint(dict(self._cfg))
+
+ @classmethod
+ def json_to_file(self, outfile=None):
+ """ Create a JSON file with the configration """
+
+ if not outfile:
+ outfile = self.get_name() + ".json"
+ save_dict_json(outfile, self.get_config(), config_sort_order)
+ print("Configuration exported to %s" % outfile)
+
+ @classmethod
+ def get_name(self):
+ """ Return the name of the configuration """
+
+ return self._name.lower()
+
+ def get_sort_order(self):
+ """ Return an ordered list of entries in the configuration """
+
+ return self._cfg.keys()
+
+
+def config_variant(**override_params):
+ """ Class decorator that enables dynamic subclass creation for different
+    configuration combinations. Override params can be any keyword based
+    argument of template_cfg._vdict """
+
+ def class_rebuilder(cls):
+ class TfmFastModelConfig(cls):
+ override = False
+ _cfg = deepcopy(cls._cfg)
+ _tpl_cfg = deepcopy(cls._cfg)
+ _vdict = deepcopy(cls._vdict)
+ for param, value in override_params.items():
+ if param in _vdict.keys():
+ _vdict[param] = value
+ override = True
+
+ if override:
+ _vdict["variant_name_tpl"] = _vdict["variant_name_tpl"] \
+ % _vdict
+
+                # Update the configuration dependent entries
+ _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+                # Adjust the binaries based on bootloader presence
+ if _vdict["bootloader"] == "BL2":
+ _vdict["app_bin"] = override_params["app_bin"] if \
+ "app_bin" in override_params else "mcuboot.axf"
+ _vdict["data_bin"] = override_params["data_bin"] if \
+ "data_bin" in override_params \
+ else "tfm_s_ns_signed.bin"
+ _vdict["data_bin_offset"] = "0x10080000"
+ else:
+ _vdict["app_bin"] = override_params["app_bin"] if \
+ "app_bin" in override_params else "tfm_s.axf"
+ _vdict["data_bin"] = override_params["data_bin"] if \
+ "data_bin" in override_params else "tfm_ns.bin"
+ _vdict["data_bin_offset"] = "0x00100000"
+
+ # Switching from AN519 requires changing the parameter
+ # cpu0.baseline=0 -> 1
+ if _vdict["platform"] == "AN519":
+ idx = _cfg["parameters"].index("cpu0.baseline=0")
+ cpu_param = _cfg["parameters"].pop(idx).replace("=0", "=1")
+ _cfg["parameters"].append(cpu_param)
+ _cfg["application"] = _cfg["application"] % _vdict
+ _cfg["data"] = _cfg["data"] % _vdict
+
+ _name = cls._name % _vdict
+
+ return TfmFastModelConfig
+
+ return class_rebuilder
+
+
+# =================== Template Classes ===================
+class template_cfg(fpv_wrapper_config):
+ """ Creates a skeleton template configuration that allows creation of
+ configuration variants which set the parameters of:
+    build_path, config, platform, compiler, as well as the missing test params,
+ test_rex, test_cases, test_end_string """
+
+ _name = fpv_wrapper_config._name + "_%(platform)s_%(compiler)s_" + \
+ "%(config)s_%(build_type)s_%(bootloader)s"
+    # variant dictionary allows individual and targeted parameter modification
+ _vdict = {
+ "build_path": "%(build_path)s",
+ "variant_name_tpl": "%(variant_name_tpl)s",
+ "app_bin_path": "%(app_bin_path)s",
+ "app_bin": "%(app_bin)s",
+ "data_bin_path": "%(data_bin_path)s",
+ "data_bin": "%(data_bin)s",
+ "data_bin_offset": "%(data_bin_offset)s",
+ "config": "%(config)s",
+ "platform": "%(platform)s",
+ "compiler": "%(compiler)s",
+ "build_type": "%(build_type)s",
+ "bootloader": "%(bootloader)s"
+ }
+
+ _cfg = deepcopy(fpv_wrapper_config._cfg)
+ _cfg["directory"] = "FVP_MPS2_11.3"
+ _cfg["terminal_log"] = "terminal_%(variant_name_tpl)s.log"
+ _cfg["bin"] = "FVP_MPS2_AEMv8M"
+ _cfg["error_on_failed"] = False
+ _cfg["application"] = (
+ "cpu0=%(build_path)s/%(variant_name_tpl)s/" +
+ "%(app_bin_path)s/%(app_bin)s")
+ _cfg["data"] = (
+ "cpu0=%(build_path)s/%(variant_name_tpl)s/%(data_bin_path)s/" +
+ "%(data_bin)s@%(data_bin_offset)s")
+ _cfg["simlimit"] = "60"
+ _cfg["parameters"] = [
+ "fvp_mps2.platform_type=2",
+ "cpu0.baseline=0",
+ "cpu0.INITVTOR_S=0x10000000",
+ "cpu0.semihosting-enable=0",
+ "fvp_mps2.DISABLE_GATING=0",
+ "fvp_mps2.telnetterminal0.start_telnet=0",
+ "fvp_mps2.telnetterminal1.start_telnet=0",
+ "fvp_mps2.telnetterminal2.start_telnet=0",
+ "fvp_mps2.telnetterminal0.quiet=1",
+ "fvp_mps2.telnetterminal1.quiet=1",
+ "fvp_mps2.telnetterminal2.quiet=1",
+ "fvp_mps2.UART0.out_file=$TERM_FILE",
+ "fvp_mps2.UART0.unbuffered_output=1",
+ "fvp_mps2.UART0.shutdown_on_eot=1",
+ "fvp_mps2.mps2_visualisation.disable-visualisation=1"]
+
+
+if __name__ == "__main__":
+ # Create Json configuration files on user request
+ pass
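+
+    # Illustrative sketch only (hypothetical parameter values): a variant
+    # subclass could be generated from the template and exported to JSON:
+    #
+    #     @config_variant(platform="AN521",
+    #                     compiler="GNUARM",
+    #                     config="ConfigRegression",
+    #                     build_type="Debug",
+    #                     bootloader="BL2")
+    #     class run_fpv_an521_example(template_cfg):
+    #         pass
+    #
+    #     run_fpv_an521_example.json_to_file("run_fpv_an521_example.json")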
diff --git a/tfm_ci_pylib/lava_rpc_connector.py b/tfm_ci_pylib/lava_rpc_connector.py
index 269cbbf..885dc4a 100644
--- a/tfm_ci_pylib/lava_rpc_connector.py
+++ b/tfm_ci_pylib/lava_rpc_connector.py
@@ -19,7 +19,7 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import xmlrpc.client
import time
diff --git a/tfm_ci_pylib/structured_task.py b/tfm_ci_pylib/structured_task.py
index b97cae9..1a2f45f 100644
--- a/tfm_ci_pylib/structured_task.py
+++ b/tfm_ci_pylib/structured_task.py
@@ -19,7 +19,7 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import abc
import time
@@ -129,7 +129,7 @@
def _t_stop(self):
""" Internal class stop to be called through thread """
- print("Thead is alive0 %s" % self.is_alive())
+
if(self.is_alive()):
print("%s =========> STOP" % self.get_name())
self._stopevent.set()
diff --git a/tfm_ci_pylib/tfm_build_manager.py b/tfm_ci_pylib/tfm_build_manager.py
index dcf75de..0849a4b 100644
--- a/tfm_ci_pylib/tfm_build_manager.py
+++ b/tfm_ci_pylib/tfm_build_manager.py
@@ -18,14 +18,15 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import os
import sys
-from pprint import pprint
+from time import time
from copy import deepcopy
from .utils import gen_cfg_combinations, list_chunks, load_json,\
- save_json, print_test
+ save_json, print_test, show_progress, \
+ resolve_rel_path
from .structured_task import structuredTask
from .tfm_builder import TFM_Builder
@@ -44,31 +45,28 @@
# "CMAKE_BUILD_TYPE": "Debug"}
report=None, # File to produce report
parallel_builds=3, # Number of builds to run in parallel
- build_threads=4, # Number of threads used per build
- markdown=True, # Create markdown report
- html=True, # Create html report
- ret_code=True, # Set ret_code of script if build failed
- install=False): # Install libraries after build
-
+ build_threads=3, # Number of threads used per build
+ install=False, # Install libraries after build
+ img_sizes=False, # Use arm-none-eabi-size for size info
+ relative_paths=False): # Store relative paths in report
self._tbm_build_threads = build_threads
self._tbm_conc_builds = parallel_builds
self._tbm_install = install
- self._tbm_markdown = markdown
- self._tbm_html = html
- self._tbm_ret_code = ret_code
+ self._tbm_img_sizes = img_sizes
+ self._tbm_relative_paths = relative_paths
# Required by other methods, always set working directory first
self._tbm_work_dir = os.path.abspath(os.path.expanduser(work_dir))
self._tbm_tfm_dir = os.path.abspath(os.path.expanduser(tfm_dir))
- # Entries will be filled after sanity test on cfg_dict dring pre_exec
- self._tbm_build_dir = None
+        # Internal flag to tag simple (non combination-formatted) configs
+ self.simple_config = False
self._tbm_report = report
- # TODO move them to pre_eval
self._tbm_cfg = self.load_config(cfg_dict, self._tbm_work_dir)
- self._tbm_build_cfg_list = self.parse_config(self._tbm_cfg)
+ self._tbm_build_cfg, \
+ self.tbm_common_cfg = self.parse_config(self._tbm_cfg)
super(TFM_Build_Manager, self).__init__(name="TFM_Build_Manager")
@@ -79,27 +77,125 @@
def pre_exec(self, eval_ret):
""" """
+ def override_tbm_cfg_params(self, config, override_keys, **params):
+ """ Using a dictionay as input, for each key defined in
+ override_keys it will replace the config[key] entries with
+ the key=value parameters provided """
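+        # Illustrative (hypothetical) example of the substitution performed:
+        #   cfg = {"build_cmds": ["cmake %(_tbm_code_dir_)s"]}
+        #   override_tbm_cfg_params(cfg, ["build_cmds"],
+        #                           _tbm_code_dir_="/work/tf-m")
+        #   -> {"build_cmds": ["cmake /work/tf-m"]}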
+
+ for key in override_keys:
+ if isinstance(config[key], list):
+ config[key] = [n % params for n in config[key]]
+ elif isinstance(config[key], str):
+ config[key] = config[key] % params
+ else:
+                raise Exception("Config entry %s has unsupported "
+                                "type %s" % (key, type(config[key])))
+ return config
+
def task_exec(self):
""" Create a build pool and execute them in parallel """
build_pool = []
- for i in self._tbm_build_cfg_list:
- name = "%s_%s_%s_%s_%s" % (i.TARGET_PLATFORM,
- i.COMPILER,
- i.PROJ_CONFIG,
- i.CMAKE_BUILD_TYPE,
- "BL2" if i.WITH_MCUBOOT else "NOBL2")
+        # When a config is flagged as a single build config,
+        # the name is derived from the config type
+ if self.simple_config:
+
+ build_cfg = deepcopy(self.tbm_common_cfg)
+
+            # Extract the entries of the config common to all platforms
+ for key in ["build_cmds", "required_artefacts"]:
+ try:
+ build_cfg[key] = build_cfg[key]["all"]
+ except KeyError:
+ build_cfg[key] = []
+ name = build_cfg["config_type"]
+
+ # Override _tbm_xxx paths in commands
+            # platform is not guaranteed without seeds so _tbm_target_platform
+ # is ignored
+ over_dict = {"_tbm_build_dir_": os.path.join(self._tbm_work_dir,
+ name),
+ "_tbm_code_dir_": build_cfg["codebase_root_dir"]}
+
+ build_cfg = self.override_tbm_cfg_params(build_cfg,
+ ["build_cmds",
+ "required_artefacts",
+ "artifact_capture_rex"],
+ **over_dict)
+
+            # Override the paths in the expected artefacts
print("Loading config %s" % name)
- build_pool.append(TFM_Builder(name,
- self._tbm_tfm_dir,
- self._tbm_work_dir,
- dict(i._asdict()),
- self._tbm_install,
- self._tbm_build_threads))
+
+ build_pool.append(TFM_Builder(
+ name=name,
+ work_dir=self._tbm_work_dir,
+ cfg_dict=build_cfg,
+ build_threads=self._tbm_build_threads,
+ img_sizes=self._tbm_img_sizes,
+ relative_paths=self._tbm_relative_paths))
+ # When a seed pool is provided iterate through the entries
+        # and update platform-specific parameters
+ elif len(self._tbm_build_cfg):
+
+ for name, i in self._tbm_build_cfg.items():
+ # Do not modify the original config
+ build_cfg = deepcopy(self.tbm_common_cfg)
+
+                # Extract the entries of the config common to all platforms
+ for key in ["build_cmds", "required_artefacts"]:
+ try:
+ build_cfg[key] = deepcopy(self.tbm_common_cfg[key]
+ ["all"])
+ except KeyError as E:
+ build_cfg[key] = []
+
+                # Extract the platform-specific elements of the config
+ for key in ["build_cmds", "required_artefacts"]:
+ try:
+ if i.target_platform in self.tbm_common_cfg[key].keys():
+ build_cfg[key] += deepcopy(self.tbm_common_cfg[key]
+ [i.target_platform])
+ except Exception as E:
+ pass
+
+                # Merge the two dictionaries since the template may contain
+                # fixed and combination seed parameters
+ cmd0 = build_cfg["config_template"] % \
+ {**dict(i._asdict()), **build_cfg}
+
+                # Prepend the configuration command as the first cmd
+ build_cfg["build_cmds"] = [cmd0] + build_cfg["build_cmds"]
+
+                # Set the override params
+ over_dict = {"_tbm_build_dir_": os.path.join(
+ self._tbm_work_dir, name),
+ "_tbm_code_dir_": build_cfg["codebase_root_dir"],
+ "_tbm_target_platform_": i.target_platform}
+
+ over_params = ["build_cmds",
+ "required_artefacts",
+ "artifact_capture_rex"]
+ build_cfg = self.override_tbm_cfg_params(build_cfg,
+ over_params,
+ **over_dict)
+
+                # Override the paths in the expected artefacts
+ print("Loading config %s" % name)
+
+ build_pool.append(TFM_Builder(
+ name=name,
+ work_dir=self._tbm_work_dir,
+ cfg_dict=build_cfg,
+ build_threads=self._tbm_build_threads,
+ img_sizes=self._tbm_img_sizes,
+ relative_paths=self._tbm_relative_paths))
+ else:
+ print("Could not find any configuration. Check the rejection list")
status_rep = {}
- full_rep = {}
+ build_rep = {}
+ completed_build_count = 0
print("Build: Running %d parallel build jobs" % self._tbm_conc_builds)
for build_pool_slice in list_chunks(build_pool, self._tbm_conc_builds):
@@ -118,11 +214,26 @@
# Similarly print the logs of the other builds as they complete
if build_pool_slice.index(build) != 0:
build.log()
+ completed_build_count += 1
print("Build: Finished %s" % build.get_name())
+ print("Build Progress:")
+ show_progress(completed_build_count, len(build_pool))
# Store status in report
status_rep[build.get_name()] = build.get_status()
- full_rep[build.get_name()] = build.report()
+ build_rep[build.get_name()] = build.report()
+
+ # Include the original input configuration in the report
+
+ metadata = {"input_build_cfg": self._tbm_cfg,
+ "build_dir": self._tbm_work_dir
+ if not self._tbm_relative_paths
+ else resolve_rel_path(self._tbm_work_dir),
+ "time": time()}
+
+ full_rep = {"report": build_rep,
+ "_metadata_": metadata}
+
# Store the report
self.stash("Build Status", status_rep)
self.stash("Build Report", full_rep)
@@ -134,7 +245,10 @@
def post_eval(self):
""" If a single build failed fail the test """
try:
- retcode_sum = sum(self.unstash("Build Status").values())
+ status_dict = self.unstash("Build Status")
+ if not status_dict:
+ raise Exception()
+ retcode_sum = sum(status_dict.values())
if retcode_sum != 0:
raise Exception()
return True
@@ -156,30 +270,6 @@
""" Expose the internal report to a new object for external classes """
return deepcopy(self.unstash("Build Report"))
- def print_summary(self):
- """ Print an comprehensive list of the build jobs with their status """
-
- full_rep = self.unstash("Build Report")
-
- # Filter out build jobs based on status
- fl = ([k for k, v in full_rep.items() if v['status'] == 'Failed'])
- ps = ([k for k, v in full_rep.items() if v['status'] == 'Success'])
-
- print_test(t_list=fl, status="failed", tname="Builds")
- print_test(t_list=ps, status="passed", tname="Builds")
-
- def gen_cfg_comb(self, platform_l, compiler_l, config_l, build_l, boot_l):
- """ Generate all possible configuration combinations from a group of
- lists of compiler options"""
- return gen_cfg_combinations("TFM_Build_CFG",
- ("TARGET_PLATFORM COMPILER PROJ_CONFIG"
- " CMAKE_BUILD_TYPE WITH_MCUBOOT"),
- platform_l,
- compiler_l,
- config_l,
- build_l,
- boot_l)
-
def load_config(self, config, work_dir):
try:
# passing config_name param supersseeds fileparam
@@ -209,52 +299,147 @@
print("Error:%s \nCould not load a valid config" % e)
sys.exit(1)
- pprint(ret_cfg)
return ret_cfg
def parse_config(self, cfg):
""" Parse a valid configuration file into a set of build dicts """
- # Generate a list of all possible confugration combinations
- full_cfg = self.gen_cfg_comb(cfg["platform"],
- cfg["compiler"],
- cfg["config"],
- cfg["build"],
- cfg["with_mcuboot"])
+ ret_cfg = {}
- # Generate a list of all invalid combinations
- rejection_cfg = []
+ # Config entries which are not subject to changes during combinations
+ static_cfg = cfg["common_params"]
- for k in cfg["invalid"]:
- # Pad the omitted values with wildcard char *
- res_list = list(k) + ["*"] * (5 - len(k))
+        # Convert the code path to an absolute path
+ abs_code_dir = static_cfg["codebase_root_dir"]
+ abs_code_dir = os.path.abspath(os.path.expanduser(abs_code_dir))
+ static_cfg["codebase_root_dir"] = abs_code_dir
- print("Working on rejection input: %s" % (res_list))
+        # seed_params is an optional field. Do not process if it is missing
+ if "seed_params" in cfg:
+ comb_cfg = cfg["seed_params"]
+            # Generate a list of all possible configuration combinations
+ ret_cfg = TFM_Build_Manager.generate_config_list(comb_cfg,
+ static_cfg)
- # Key order matters. Use index to retrieve default values When
- # wildcard * char is present
- _cfg_keys = ["platform",
- "compiler",
- "config",
- "build",
- "with_mcuboot"]
+            # invalid is an optional field. Do not process if it is missing
+ if "invalid" in cfg:
+ # Invalid configurations(Do not build)
+ invalid_cfg = cfg["invalid"]
+ # Remove the rejected entries from the test list
+ rejection_cfg = TFM_Build_Manager.generate_rejection_list(
+ comb_cfg,
+ static_cfg,
+ invalid_cfg)
- # Replace wildcard ( "*") entries with every inluded in cfg variant
- for n in range(len(res_list)):
- res_list[n] = [res_list[n]] if res_list[n] != "*" \
- else cfg[_cfg_keys[n]]
+ # Subtract the two configurations
+ ret_cfg = {k: v for k, v in ret_cfg.items()
+ if k not in rejection_cfg}
+ self.simple_config = False
+ else:
+ self.simple_config = True
+ return ret_cfg, static_cfg
- rejection_cfg += self.gen_cfg_comb(*res_list)
+    # ----- Override below methods when subclassing for other projects ----- #
- # Notfy the user for the rejected configuations
- for i in rejection_cfg:
+ def print_summary(self):
+ """ Print an comprehensive list of the build jobs with their status """
- name = "%s_%s_%s_%s_%s" % (i.TARGET_PLATFORM,
- i.COMPILER,
- i.PROJ_CONFIG,
- i.CMAKE_BUILD_TYPE,
- "BL2" if i.WITH_MCUBOOT else "NOBL2")
- print("Rejecting config %s" % name)
+ try:
+ full_rep = self.unstash("Build Report")["report"]
+ fl = ([k for k, v in full_rep.items() if v['status'] == 'Failed'])
+ ps = ([k for k, v in full_rep.items() if v['status'] == 'Success'])
+ except Exception as E:
+ print("No report generated")
+ return
+ if fl:
+ print_test(t_list=fl, status="failed", tname="Builds")
+ if ps:
+ print_test(t_list=ps, status="passed", tname="Builds")
- # Subtract the two lists and convert to dictionary
- return list(set(full_cfg) - set(rejection_cfg))
+ @staticmethod
+ def generate_config_list(seed_config, static_config):
+ """ Generate all possible configuration combinations from a group of
+ lists of compiler options"""
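+        # Illustrative (hypothetical) seed input and resulting keys:
+        #   seed_config = {"target_platform": ["AN521"],
+        #                  "compiler": ["GNUARM"],
+        #                  "with_mcuboot": [True]}
+        #   -> {"AN521_GNUARM_BL2": TFM_Build_CFG(...)}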
+ config_list = []
+
+ if static_config["config_type"] == "tf-m":
+ cfg_name = "TFM_Build_CFG"
+            # Ensure the fields are sorted in the desired order
+ # seed_config can be a subset of sort order for configurations with
+ # optional parameters.
+ tags = [n for n in static_config["sort_order"]
+ if n in seed_config.keys()]
+
+ data = []
+ for key in tags:
+ data.append(seed_config[key])
+ config_list = gen_cfg_combinations(cfg_name,
+ " ".join(tags),
+ *data)
+ else:
+            print("No information for project type: %s."
+                  " Please check config" % static_config["config_type"])
+
+ ret_cfg = {}
+        # Key each configuration tuple by a human-readable name
+ for i in config_list:
+ # Convert named tuples to string with boolean support
+ i_str = "_".join(map(lambda x: repr(x)
+ if isinstance(x, bool) else x, list(i)))
+
+            # Replace boolean values with BL2/NOBL2 and use the result as
+            # the configuration name.
+ ret_cfg[i_str.replace("True", "BL2").replace("False", "NOBL2")] = i
+
+ return ret_cfg
+
+ @staticmethod
+ def generate_rejection_list(seed_config,
+ static_config,
+ rejection_list):
+ rejection_cfg = {}
+
+ if static_config["config_type"] == "tf-m":
+
+ # If rejection list is empty do nothing
+ if not rejection_list:
+ return rejection_cfg
+
+ tags = [n for n in static_config["sort_order"]
+ if n in seed_config.keys()]
+ sorted_default_lst = [seed_config[k] for k in tags]
+
+            # If tags are not aligned with rejection list entries quit
+ if len(tags) != len(rejection_list[0]):
+ print(len(tags), len(rejection_list[0]))
+ print("Error, tags should be assigned to each "
+ "of the rejection inputs")
+                return {}
+
+            # Replace wildcard ("*") entries with every value
+            # included in the cfg variant
+ for k in rejection_list:
+ # Pad the omitted values with wildcard char *
+ res_list = list(k) + ["*"] * (5 - len(k))
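+                # e.g. an entry ["AN521", "GNUARM"] would become
+                # ["AN521", "GNUARM", "*", "*", "*"] (illustrative)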
+ print("Working on rejection input: %s" % (res_list))
+
+ for n in range(len(res_list)):
+
+ res_list[n] = [res_list[n]] if res_list[n] != "*" \
+ else sorted_default_lst[n]
+
+ # Generate a configuration and a name for the completed array
+ rj_cfg = TFM_Build_Manager.generate_config_list(
+ dict(zip(tags, res_list)),
+ static_config)
+
+ # Append the configuration to the existing ones
+ rejection_cfg = {**rejection_cfg, **rj_cfg}
+
+            # Notify the user of the rejected configurations
+ for i in rejection_cfg.keys():
+ print("Rejecting config %s" % i)
+ else:
+            print("No information for project type: %s."
+                  " Please check config" % static_config["config_type"])
+ return rejection_cfg
diff --git a/tfm_ci_pylib/tfm_builder.py b/tfm_ci_pylib/tfm_builder.py
index 37a1315..1908a8e 100644
--- a/tfm_ci_pylib/tfm_builder.py
+++ b/tfm_ci_pylib/tfm_builder.py
@@ -18,58 +18,48 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import os
-from .utils import *
+import re
import shutil
+from .utils import *
from .structured_task import structuredTask
class TFM_Builder(structuredTask):
""" Wrap around tfm cmake system and spawn a thread to build the project.
"""
- _tfb_build_params = ["TARGET_PLATFORM",
- "COMPILER",
- "PROJ_CONFIG",
- "CMAKE_BUILD_TYPE",
- "WITH_MCUBOOT"
- ]
-
- _tfb_build_template = ("cmake -G \"Unix Makefiles\" -DPROJ_CONFIG=`"
- "readlink -f %(PROJ_CONFIG)s.cmake` "
- "-DTARGET_PLATFORM=%(TARGET_PLATFORM)s "
- "-DCOMPILER=%(COMPILER)s "
- "-DCMAKE_BUILD_TYPE=%(CMAKE_BUILD_TYPE)s "
- "-DBL2=%(WITH_MCUBOOT)s "
- "%(TFM_ROOT)s")
-
def __init__(self,
name, # Proccess name
- tfm_dir, # TFM root directory
work_dir, # Current working directory(ie logs)
cfg_dict, # Input config dictionary of the following form
# input_dict = {"PROJ_CONFIG": "ConfigRegression",
# "TARGET_PLATFORM": "MUSCA_A",
# "COMPILER": "ARMCLANG",
# "CMAKE_BUILD_TYPE": "Debug"}
- install=False, # Install library after build
- build_threads=4, # Number of CPU thrads used in build
- silent=False): # Silence stdout ouptut
+                 build_threads=4,     # Number of CPU threads used in build
+                 silent=False,        # Silence stdout output
+ img_sizes=False, # Use arm-none-eabi-size for size info
+ relative_paths=False): # Store relative paths in report
self._tfb_cfg = cfg_dict
self._tfb_build_threads = build_threads
- self._tfb_install = install
self._tfb_silent = silent
+ self._tfb_img_sizes = img_sizes
+ self._tfb_relative_paths = relative_paths
self._tfb_binaries = []
# Required by other methods, always set working directory first
self._tfb_work_dir = os.path.abspath(os.path.expanduser(work_dir))
- self._tfb_tfm_dir = os.path.abspath(os.path.expanduser(tfm_dir))
+ # Override code_base_dir with abspath
+ _code_dir = self._tfb_cfg["codebase_root_dir"]
+ self._tfb_code_dir = os.path.abspath(os.path.expanduser(_code_dir))
# Entries will be filled after sanity test on cfg_dict dring pre_exec
self._tfb_build_dir = None
self._tfb_log_f = None
+
super(TFM_Builder, self).__init__(name=name)
def mute(self):
@@ -77,10 +67,14 @@
def log(self):
""" Print and return the contents of log file """
- with open(self._tfb_log_f, "r") as F:
- log = F.read()
- print(log)
- return log
+ try:
+ with open(self._tfb_log_f, "r") as F:
+ log = F.read()
+ print(log)
+ return log
+ except FileNotFoundError:
+ print("Log %s not found" % self._tfb_log_f)
+ return ""
def report(self):
"""Return the report on the job """
@@ -89,14 +83,10 @@
def pre_eval(self):
""" Tests that need to be run in set-up state """
- # Test that all required entries exist in config
- diff = list(set(self._tfb_build_params) - set(self._tfb_cfg.keys()))
- if diff:
- print("Cound't find require build entry: %s in config" % diff)
+ if not os.path.isdir(self._tfb_code_dir):
+ print("Missing code-base directory:", self._tfb_code_dir)
return False
- # TODO check validity of passed config values
- # TODO test detection of srec
- # self.srec_path = shutil.which("srec_cat")
+
return True
def pre_exec(self, eval_ret):
@@ -107,14 +97,6 @@
# Ensure we have a clean build directory
shutil.rmtree(self._tfb_build_dir, ignore_errors=True)
- self._tfb_cfg["TFM_ROOT"] = self._tfb_tfm_dir
-
- # Append the path for the config
- self._tfb_cfg["PROJ_CONFIG"] = os.path.join(self._tfb_tfm_dir,
- "configs",
- self._tfb_cfg[("PROJ_"
- "CONFIG")])
-
# Log will be placed in work directory, named as the build dir
self._tfb_log_f = "%s.log" % self._tfb_build_dir
@@ -123,100 +105,6 @@
if not os.path.exists(p):
os.makedirs(p)
- # Calcuate a list of expected binaries
- binaries = []
-
- # If install is asserted pick the iems from the appropriate location
- if self._tfb_install:
-
- fvp_path = os.path.join(self._tfb_build_dir,
- "install", "outputs", "fvp")
- platform_path = os.path.join(self._tfb_build_dir,
- "install",
- "outputs",
- self._tfb_cfg["TARGET_PLATFORM"])
-
- # Generate a list of binaries included in both directories
- common_bin_list = ["tfm_%s.%s" % (s, e) for s in ["s", "ns"]
- for e in ["bin", "axf"]]
- if self._tfb_cfg["WITH_MCUBOOT"]:
- common_bin_list += ["mcuboot.%s" % e for e in ["bin", "axf"]]
-
- # When building with bootloader extra binaries are expected
- binaries += [os.path.join(platform_path, b) for b in
- ["tfm_sign.bin"]]
- binaries += [os.path.join(fvp_path, b) for b in
- ["tfm_s_ns_signed.bin"]]
-
- binaries += [os.path.join(p, b) for p in [fvp_path, platform_path]
- for b in common_bin_list]
-
- # Add Musca required binaries
- if self._tfb_cfg["TARGET_PLATFORM"] == "MUSCA_A":
- binaries += [os.path.join(platform_path,
- "musca_firmware.hex")]
-
- self._tfb_binaries = binaries
-
- else:
- binaries += [os.path.join(self._tfb_build_dir, "app", "tfm_ns")]
- binaries += [os.path.join(self._tfb_build_dir, "app",
- "secure_fw", "tfm_s")]
- if self._tfb_cfg["WITH_MCUBOOT"]:
- binaries += [os.path.join(self._tfb_build_dir,
- "bl2", "ext", "mcuboot", "mcuboot")]
-
- ext = ['.bin', '.axf']
- self._tfb_binaries = ["%s%s" % (n, e) for n in binaries
- for e in ext]
-
- # Add Musca required binaries
- if self._tfb_cfg["TARGET_PLATFORM"] == "MUSCA_A":
- self._tfb_binaries += [os.path.join(self._tfb_build_dir,
- "tfm_sign.bin")]
- self._tfb_binaries += [os.path.join(self._tfb_build_dir,
- "musca_firmware.hex")]
-
- def get_binaries(self,
- bootl=None,
- bin_s=None,
- bin_ns=None,
- bin_sign=None,
- filt=None):
- """ Return the absolute location of binaries (from config)
- if they exist. Can add a filter parameter which will only
- consider entries with /filter/ in their path as a directory """
- ret_boot = None
- ret_bin_ns = None
- ret_bin_s = None
- ret_bin_sign = None
-
- # Apply filter as a /filter/ string to the binary list
- filt = "/" + filt + "/" if filter else None
- binaries = list(filter(lambda x: filt in x, self._tfb_binaries)) \
- if filt else self._tfb_binaries
-
- for obj_file in binaries:
- fname = os.path.split(obj_file)[-1]
- if bootl:
- if fname == bootl:
- ret_boot = obj_file
- continue
- if bin_s:
- if fname == bin_s:
- ret_bin_s = obj_file
- continue
-
- if bin_ns:
- if fname == bin_ns:
- ret_bin_ns = obj_file
- continue
- if bin_sign:
- if fname == bin_sign:
- ret_bin_sign = obj_file
- continue
- return [ret_boot, ret_bin_s, ret_bin_ns, ret_bin_sign]
-
def task_exec(self):
""" Main tasks """
@@ -224,141 +112,100 @@
self.set_status(-1)
# Go to build directory
os.chdir(self._tfb_build_dir)
- # Compile the build commands
- cmake_cmd = self._tfb_build_template % self._tfb_cfg
- build_cmd = "cmake --build ./ -- -j %s" % self._tfb_build_threads
+
+ build_cmds = self._tfb_cfg["build_cmds"]
+
+ threads_no_rex = re.compile(r'.*(-j\s?(\d+))')
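+        # e.g. "cmake --build ./ -- -j 8" -> groups ("-j 8", "8")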
# Pass the report to later stages
- rep = {"build_cmd": "%s" % build_cmd,
- "cmake_cmd": "%s" % cmake_cmd}
+ rep = {"build_cmd": "%s" % ",".join(build_cmds)}
self.stash("Build Report", rep)
- # Calll camke to configure the project
- if not subprocess_log(cmake_cmd,
- self._tfb_log_f,
- prefix=cmake_cmd,
- silent=self._tfb_silent):
+        # Execute each of the build commands in sequence
+        for build_cmd in build_cmds:
+            # If a -j parameter is passed as a user argument
+ user_set_threads_match = threads_no_rex.findall(build_cmd)
+
+ if user_set_threads_match:
+ # Unpack the regex groups (fullmatch, decimal match)
+ user_jtxt, user_set_threads = user_set_threads_match[0]
+ if int(user_set_threads) > self._tfb_build_threads:
+                    print("Ignoring user requested n=%s threads because it"
+                          " exceeds the maximum thread count (%d)" %
+ (user_set_threads, self._tfb_build_threads))
+ thread_no = self._tfb_build_threads
+ else:
+ print("Using %s build threads" % user_set_threads)
+ thread_no = user_set_threads
+ build_cmd = build_cmd.replace(user_jtxt,
+ "-j %s " % thread_no)
+
# Build it
if subprocess_log(build_cmd,
self._tfb_log_f,
append=True,
prefix=build_cmd,
silent=self._tfb_silent):
+
raise Exception("Build Failed please check log: %s" %
self._tfb_log_f)
- else:
- raise Exception("Cmake Failed please check log: %s" %
- self._tfb_log_f)
- if self._tfb_install:
- install_cmd = "cmake --build ./ -- -j install"
- if subprocess_log(install_cmd,
- self._tfb_log_f,
- append=True,
- prefix=install_cmd,
- silent=self._tfb_silent):
- raise Exception(("Make install Failed."
- " please check log: %s") % self._tfb_log_f)
- if self._tfb_cfg["TARGET_PLATFORM"] == "MUSCA_A":
- boot_f, s_bin, ns_bin, sns_signed_bin = self.get_binaries(
- bootl="mcuboot.bin",
- bin_s="tfm_s.bin",
- bin_ns="tfm_ns.bin",
- bin_sign="tfm_sign.bin",
- filt="MUSCA_A")
- self.convert_to_hex(boot_f, sns_signed_bin)
self._t_stop()
- def sign_img(self, secure_bin, non_secure_bin):
- """Join a secure and non secure image and sign them"""
-
- imgtool_dir = os.path.join(self._tfb_tfm_dir,
- "bl2/ext/mcuboot/scripts/")
- flash_layout = os.path.join(self._tfb_tfm_dir,
- "platform/ext/target/musca_a/"
- "partition/flash_layout.h")
- sign_cert = os.path.join(self._tfb_tfm_dir,
- "bl2/ext/mcuboot/root-rsa-2048.pem")
- sns_unsigned_bin = os.path.join(self._tfb_build_dir,
- "sns_unsigned.bin")
- sns_signed_bin = os.path.join(self._tfb_build_dir, "sns_signed.bin")
-
- # Early versions of the tool hard relative imports, run from its dir
- os.chdir(imgtool_dir)
- assemble_cmd = ("python3 assemble.py -l %(layout)s -s %(s)s "
- "-n %(ns)s -o %(sns)s") % {"layout": flash_layout,
- "s": secure_bin,
- "ns": non_secure_bin,
- "sns": sns_unsigned_bin
- }
- sign_cmd = ("python3 imgtool.py sign -k %(cert)s --align 1 -v "
- "1.0 -H 0x400 --pad 0x30000 "
- "%(sns)s %(sns_signed)s") % {"cert": sign_cert,
- "sns": sns_unsigned_bin,
- "sns_signed": sns_signed_bin
- }
- run_proccess(assemble_cmd)
- run_proccess(sign_cmd)
- # Return to build directory
- os.chdir(self._tfb_build_dir)
- return sns_signed_bin
-
- def convert_to_hex(self,
- boot_bin,
- sns_signed_bin,
- qspi_base=0x200000,
- boot_size=0x10000):
- """Convert a signed image to an intel hex format with mcuboot """
- if self._tfb_install:
- platform_path = os.path.join(self._tfb_build_dir,
- "install",
- "outputs",
- self._tfb_cfg["TARGET_PLATFORM"])
- firmware_hex = os.path.join(platform_path, "musca_firmware.hex")
- else:
- firmware_hex = os.path.join(self._tfb_build_dir,
- "musca_firmware.hex")
-
- img_offset = qspi_base + boot_size
- merge_cmd = ("srec_cat %(boot)s -Binary -offset 0x%(qspi_offset)x "
- "%(sns_signed)s -Binary -offset 0x%(img_offset)x "
- "-o %(hex)s -Intel") % {"boot": boot_bin,
- "sns_signed": sns_signed_bin,
- "hex": firmware_hex,
- "qspi_offset": qspi_base,
- "img_offset": img_offset
- }
- run_proccess(merge_cmd)
- return
-
def post_eval(self):
""" Verify that the artefacts exist """
print("%s Post eval" % self.get_name())
ret_eval = False
rep = self.unstash("Build Report")
- missing_binaries = list(filter(lambda x: not os.path.isfile(x),
- self._tfb_binaries))
- if len(missing_binaries):
- print("ERROR: Could not locate the following binaries:")
- print("\n".join(missing_binaries))
-
- # Update the artifacts to not include missing ones
- artf = [n for n in self._tfb_binaries if n not in missing_binaries]
- # TODO update self._tfb_binaries
- ret_eval = False
- else:
- print("SUCCESS: Produced binaries:")
- print("\n".join(self._tfb_binaries))
- ret_eval = True
-
- artf = self._tfb_binaries
+ artefacts = list_filtered_tree(self._tfb_work_dir, r'%s' %
+ self._tfb_cfg["artifact_capture_rex"])
# Add artefact related information to report
rep["log"] = self._tfb_log_f
- rep["missing_artefacts"] = missing_binaries
- rep["artefacts"] = artf
+
+ if not len(artefacts):
+            print("ERROR: Could not capture any binaries")
+
+ # TODO update self._tfb_binaries
+ ret_eval = False
+ else:
+ print("SUCCESS: Produced the following binaries:")
+ print("\n\t".join(artefacts))
+ ret_eval = True
+
+ rep["artefacts"] = artefacts
+
+        # Process the artefacts into file structures
+ art_files = {}
+ for art_item in artefacts:
+ art_f = {"pl_source": 1,
+ "resource": art_item if not self._tfb_relative_paths
+ else resolve_rel_path(art_item),
+ "size": {"bytes": str(os.path.getsize(art_item))}
+ }
+ if self._tfb_img_sizes and ".axf" in art_item:
+ eabi_size, _ = arm_non_eabi_size(art_item)
+ art_f["size"]["text"] = eabi_size["text"]
+ art_f["size"]["data"] = eabi_size["data"]
+ art_f["size"]["bss"] = eabi_size["bss"]
+            # filename is used as the key for artefacts
+ art_files[os.path.split(art_item)[-1]] = art_f
+ rep["artefacts"] = art_files
+
+ if "required_artefacts" in self._tfb_cfg.keys():
+ if len(self._tfb_cfg["required_artefacts"]):
+ print("Searching for required binaries")
+ missing_binaries = list(filter(lambda x: not os.path.isfile(x),
+ self._tfb_cfg["required_artefacts"]))
+ if len(missing_binaries):
+ rep["missing_artefacts"] = missing_binaries
+ print("ERROR: Missing required artefacts:")
+ print("\n".join(missing_binaries))
+ ret_eval = False
+ else:
+ ret_eval = True
rep["status"] = "Success" if ret_eval else "Failed"
self.stash("Build Report", rep)
@@ -371,3 +218,7 @@
print("TFM Builder %s was Successful" % self.get_name())
else:
print("TFM Builder %s was UnSuccessful" % self.get_name())
+
+
+if __name__ == "__main__":
+ pass
diff --git a/tfm_ci_pylib/utils.py b/tfm_ci_pylib/utils.py
index 7d1ca46..2096b8b 100755
--- a/tfm_ci_pylib/utils.py
+++ b/tfm_ci_pylib/utils.py
@@ -19,16 +19,20 @@
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
import os
+import re
import sys
import yaml
+import requests
import argparse
import json
import itertools
+import xmltodict
+from shutil import move
from collections import OrderedDict, namedtuple
-from subprocess import Popen, PIPE, STDOUT
+from subprocess import Popen, PIPE, STDOUT, check_output
def detect_python3():
@@ -37,6 +41,22 @@
return sys.version_info > (3, 0)
+def find_missing_files(file_list):
+ """ Return the files that dot not exist in the file_list """
+
+ F = set(file_list)
+ T = set(list(filter(os.path.isfile, file_list)))
+ return list(F.difference(T))
+
+
+def resolve_rel_path(target_path, origin_path=os.getcwd()):
+ """ Resolve relative path from origin to target. By default origin
+ path is current working directory. """
+
+ common = os.path.commonprefix([origin_path, target_path])
+ return os.path.relpath(target_path, common)
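+    # e.g. resolve_rel_path("/ws/ci/build/tfm_s.axf", "/ws/ci")
+    # returns "build/tfm_s.axf" (illustrative paths)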
+
+
def print_test_dict(data_dict,
pad_space=80,
identation=5,
@@ -248,6 +268,36 @@
return pcss.returncode
+def get_pid_status(pid):
+ """ Read the procfc in Linux machines to determine a proccess's statusself.
+ Returns status if proccess exists or None if it does not """
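+    # /proc/<pid>/status contains a line like "State:\tS (sleeping)";
+    # the regex extracts the word inside the parentheses, e.g. "sleeping"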
+
+ try:
+ with open("/proc/%s/status" % pid, "r") as F:
+ full_state = F.read()
+ return re.findall(r'(?:State:\t[A-Z]{1} \()(\w+)',
+ full_state, re.MULTILINE)[0]
+ except Exception as e:
+ print("Exception", e)
+
+
+def check_pid_status(pid, status_list):
+ """ Check a proccess's status againist a provided lists and return True
+ if the proccess exists and has a status included in the list. (Linux) """
+
+ pid_status = get_pid_status(pid)
+
+ if not pid_status:
+ print("PID %s does not exist." % pid)
+ return False
+
+ ret = pid_status in status_list
+ # TODO Remove debug print
+ if not ret:
+ print("PID status %s not in %s" % (pid_status, ",".join(status_list)))
+ return ret
+
+
def list_chunks(l, n):
""" Yield successive n-sized chunks from l. """
@@ -276,6 +326,17 @@
return [build_config(*x) for x in itertools.product(*args)]
+def show_progress(current_count, total_count):
+ """ Display the percent progress percentage of input metric a over b """
+
+ progress = int((current_count / total_count) * 100)
+ completed_count = int(progress * 0.7)
+ remaining_count = 70 - completed_count
+ print("[ %s%s | %d%% ]" % ("#" * completed_count,
+ "~" * remaining_count,
+ progress))
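+    # e.g. show_progress(3, 4) prints "[ ###...~~~ | 75% ]" with 52 "#"
+    # and 18 "~" characters in the 70-character bar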
+
+
def get_cmd_args(descr="", parser=None):
""" Parse command line arguments """
# Parse command line arguments to override config
@@ -283,3 +344,230 @@
if not parser:
parser = argparse.ArgumentParser(description=descr)
return parser.parse_args()
+
+
+def arm_non_eabi_size(filename):
+ """ Run arm-non-eabi-size command and parse the output using regex. Will
+ return a tuple with the formated data as well as the raw output of the
+ command """
+
+ size_info_rex = re.compile(r'^\s+(?P<text>[0-9]+)\s+(?P<data>[0-9]+)\s+'
+ r'(?P<bss>[0-9]+)\s+(?P<dec>[0-9]+)\s+'
+ r'(?P<hex>[0-9a-f]+)\s+(?P<file>\S+)',
+ re.MULTILINE)
+
+ eabi_size = check_output(["arm-none-eabi-size",
+ filename],
+ timeout=2).decode('UTF-8').rstrip()
+
+ size_data = re.search(size_info_rex, eabi_size)
+
+ return [{"text": size_data.group("text"),
+ "data": size_data.group("data"),
+ "bss": size_data.group("bss"),
+ "dec": size_data.group("dec"),
+ "hex": size_data.group("hex")}, eabi_size]
+
+
+def list_subdirs(directory):
+
+ directory = os.path.abspath(directory)
+ abs_sub_dirs = [os.path.join(directory, n) for n in os.listdir(directory)]
+ return [n for n in abs_sub_dirs if os.path.isdir(os.path.realpath(n))]
+
+
+def get_local_git_info(directory, json_out_f=None):
+ """ Extract git related information from a target directory. It allows
+ optional export to json file """
+
+ directory = os.path.abspath(directory)
+ cur_dir = os.path.abspath(os.getcwd())
+ os.chdir(directory)
+
+ # System commands to collect information
+ cmd1 = "git log HEAD -n 1 --pretty=format:'%H%x09%an%x09%ae%x09%ai%x09%s'"
+ cmd2 = "git log HEAD -n 1 --pretty=format:'%b'"
+ cmd3 = "git remote -v | head -n 1 | awk '{ print $2}';"
+ cmd4 = ("git ls-remote --heads origin | "
+ "grep $(git rev-parse HEAD) | cut -d / -f 3")
+
+ git_info_rex = re.compile(r'(?P<body>^[\s\S]*?)((?:Change-Id:\s)'
+ r'(?P<change_id>.*)\n)((?:Signed-off-by:\s)'
+ r'(?P<sign_off>.*)\n?)', re.MULTILINE)
+
+ proc_res = []
+ for cmd in [cmd1, cmd2, cmd3, cmd4]:
+ r, e = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
+ if e:
+ print("Error", e)
+ return
+ else:
+ try:
+ txt_body = r.decode('ascii')
+ except UnicodeDecodeError as E:
+ txt_body = r.decode('utf-8')
+ proc_res.append(txt_body.rstrip())
+
+ # Unpack and tag the data
+ hash, name, email, date, subject = proc_res[0].split('\t')
+
+ _raw_body = proc_res[1]
+ _bd_items = re.findall(r'(Signed-off-by|Change-Id)', _raw_body,
+ re.MULTILINE)
+
+ signed_off = None
+ body = None
+ change_id = None
+ # If both sign-off and gerrit-id exist
+ if len(_bd_items) == 2:
+ m = git_info_rex.search(_raw_body)
+ print(git_info_rex.findall(_raw_body))
+ if m is not None:
+ match_dict = m.groupdict()
+ if "body" in match_dict.keys():
+ body = match_dict["body"]
+ if "sign_off" in match_dict.keys():
+ signed_off = match_dict["sign_off"]
+ if "change_id" in match_dict.keys():
+ change_id = match_dict["change_id"]
+ else:
+ print("Error: Could not regex parse message", repr(_raw_body))
+ body = _raw_body
+ # If only one of sign-off / gerrit-id exist
+ elif len(_bd_items) == 1:
+ _entry_key = _bd_items[0]
+ body, _extra = _raw_body.split(_entry_key)
+ if _entry_key == "Change-Id":
+ change_id = _extra
+ else:
+ signed_off = _extra
+ # If the message contains commit message body only
+ else:
+ body = _raw_body
+
+ # Attempt to read the branch from Gerrit Trigger
+ try:
+ branch = os.environ["GERRIT_BRANCH"]
+    # If not, compare the commit hash with the remote branches to determine
+    # the branch of origin. Warning: this assumes that only one branch has
+    # its head on this commit.
+ except KeyError as E:
+ branch = proc_res[3]
+
+ remote = proc_res[2]
+ # Internal Gerrit specific code
+    # Intended for converting the git remote to a more usable url
+ known_remotes = ["https://gerrit.oss.arm.com",
+ "http://gerrit.mirror.oss.arm.com"]
+
+ for kr in known_remotes:
+ if kr in remote:
+ print("Applying Remote specific patch to remote", kr)
+
+ remote = remote.split(kr)[-1][1:]
+ print("REMOTE", remote)
+ remote = "%s/gitweb?p=%s.git;a=commit;h=%s" % (kr, remote, hash)
+ break
+
+ out = {"author": name.strip(),
+ "email": email.strip(),
+ "dir": directory.strip(),
+ "remote": remote.strip(),
+ "date": date.strip(),
+ "commit": hash.strip(),
+ "subject": subject.strip(),
+ "message": body.strip(),
+ "change_id": change_id.strip() if change_id is not None else "N.A",
+ "sign_off": signed_off.strip() if signed_off is not None else "N.A",
+ "branch": branch.strip()}
+
+ # Restore the directory path
+ os.chdir(cur_dir)
+ if json_out_f:
+ save_json(json_out_f, out)
+ return out
+
+
+def get_remote_git_info(url):
+ """ Collect git information from a Linux Kernel web repository """
+
+ auth_rex = re.compile(r'(?:<th>author</th>.*)(?:span>)(.*)'
+ r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
+ # commiter_rex = re.compile(r'(?:<th>committer</th>.*)(?:</div>)(.*)'
+ # r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
+ subject_rex = re.compile(r'(?:\'commit-subject\'>)(.*)(?:</div>)')
+ body_rex = re.compile(r'(?:\'commit-msg\'>)([\s\S^<]*)(?:</div>'
+ r'<div class=\'diffstat-header\'>)', re.MULTILINE)
+
+ content = requests.get(url).text
+ author, date = re.search(auth_rex, content).groups()
+ subject = re.search(subject_rex, content).groups()[0]
+ body = re.search(body_rex, content).groups()[0]
+ remote, hash = url.split("=")
+
+ outdict = {"author": author,
+ "remote": remote[:-3],
+ "date": date,
+ "commit": hash,
+ "subject": subject,
+ "message": body}
+ # Clean up html noise
+ return {k: re.sub(r'&[a-z]t;?', "", v) for k, v in outdict.items()}
+
+
+def convert_git_ref_path(dir_path):
+ """ If a git long hash is detected in a path move it to a short hash """
+
+ # Detect a git hash on a directory naming format of name_{hash},
+ # {hash}, name-{hash}
+ git_hash_rex = re.compile(r'(?:[_|-])*([a-f0-9]{40})')
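+    # e.g. "mbedcrypto_<40-hex-digit-hash>" would be renamed to
+    # "mbedcrypto_<first-7-hex-digits>" (illustrative name)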
+
+ # if checkout directory name contains a git reference convert to short
+ git_hash = git_hash_rex.findall(dir_path)
+ if len(git_hash):
+ d = dir_path.replace(git_hash[0], git_hash[0][:7])
+ print("Renaming %s -> %s", dir_path, d)
+ move(dir_path, d)
+ dir_path = d
+ return dir_path
+
+
+def xml_read(file):
+ """" Read the contects of an xml file and convert it to python object """
+
+ data = None
+ try:
+ with open(file, "r") as F:
+ data = xmltodict.parse(F.read())
+ except Exception as E:
+ print("Error", E)
+ return data
+
+
+def list_filtered_tree(directory, rex_filter=None):
+ ret = []
+ for path, subdirs, files in os.walk(directory):
+ for fname in files:
+ ret.append(os.path.join(path, fname))
+ if rex_filter:
+ rex = re.compile(rex_filter)
+ return [n for n in ret if rex.search(n)]
+ else:
+ return ret
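+    # e.g. list_filtered_tree("build", r'\.axf$') returns every .axf file
+    # found under the build/ tree (illustrative)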
+
+
+def gerrit_patch_from_changeid(remote, change_id):
+ """ Use Gerrit's REST api for a best effort to retrieve the url of the
+ patch-set under review """
+
+ try:
+ r = requests.get('%s/changes/%s' % (remote, change_id),
+ headers={'Accept': 'application/json'})
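+        # Gerrit prepends a ")]}'" anti-XSSI prefix to its JSON responses;
+        # skip everything before the first "{" so the payload can be parsed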
+ resp_data = r.text[r.text.find("{"):].rstrip()
+ change_no = json.loads(resp_data)["_number"]
+ return "%s/#/c/%s" % (remote, change_no)
+ except Exception as E:
+ print("Failed to retrieve change (%s) from URL %s" % (change_id,
+ remote))
+ print("Exception Thrown:", E)
+ raise Exception()
diff --git a/virtualevn/requirements_tfm_python2.txt b/virtualevn/requirements_tfm_python2.txt
deleted file mode 100644
index add452d..0000000
--- a/virtualevn/requirements_tfm_python2.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2018-2019, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-pygments 2.2.0
diff --git a/virtualevn/requirements_tfm_python3.txt b/virtualevn/requirements_tfm_python3.txt
deleted file mode 100644
index bdcac54..0000000
--- a/virtualevn/requirements_tfm_python3.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2018-2019, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-Jinja2==2.10
-MarkupSafe==1.0
-PyYAML==3.12
-pycryptodome==3.6.6
-pyasn1==0.1.9