Open CI Scripts: Feature Update

    * build_helper: Added --install argument to execute cmake install
    * build_helper: Added the capability to parse axf files for
      code/data/bss sizes and capture it to report
    * build_helper: Added --relative-paths to calculate paths relative
      to the root of the workspace
    * build_helper_configs: Full restructure of config modules.
      Extra build commands and expected artefacts can be defined per
      platform basis
    * Checkpatch: Added the --ignore SPDX_LICENSE_TAG directive and
      the capability to run only on files changed in a patch.
    * CppCheck adjusted suppression directories for new external
      libraries and code-base restructure
    * Added fastmodel dispatcher. It will wrap around fastmodels
      and test against a dynamically defined test_map. Fed with an
      input of the build summary fastmodel dispatcher will detect
      builds which have tests in the map and run them.
    * Added Fastmodel configs for AN519 and AN521 platforms
    * lava_helper: Added arguments for --override-jenkins-job and
      --override-jenkins-url
    * Adjusted JINJA2 template to include build number and
      enable the overrides.
    * Adjusted lava helper configs to support dual platform firmware
      and added CoreIPC config
    * Added report parser module to create/read/evaluate and
      modify reports. Bash scripts for cppcheck checkpatch summaries
      have been removed.
    * Adjusted run_cppcheck/run_checkpatch for new project libraries,
      new codebase structure and other tweaks.
    * Restructured build manager, decoupling it from the tf-m
      cmake requirements. Build manager can now dynamically build a
      configuration from combination of parameters or can just execute
      an array of build commands. Hardcoded tf-m assumptions have been
      removed and moved into the configuration space.
    * Build system can now produce MUSCA_A/MUSCA_B1 binaries as well
      as Intel HEX files.
    * Updated the utilities snippet collection in the tfm-ci-pylib.

Change-Id: Ifad7676e1cd47e3418e851b56dbb71963d85cd88
Signed-off-by: Minos Galanakis <minos.galanakis@linaro.org>
diff --git a/tfm_ci_pylib/fastmodel_wrapper/__init__.py b/tfm_ci_pylib/fastmodel_wrapper/__init__.py
new file mode 100644
index 0000000..d59ebcc
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/__init__.py
@@ -0,0 +1,21 @@
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+__all__ = ["config_templates",
+           "fastmodel_config_map",
+           "fastmodel_wrapper",
+           "fastmodel_wrapper_config"]
+
+from .fastmodel_wrapper_config import config_variant, fpv_wrapper_config
+from .fastmodel_wrapper import FastmodelWrapper
+from .fastmodel_config_map import FastmodelConfigMap
+
+from .config_templates import template_default_config, \
+    template_regression_config, template_coreipc_config, \
+    template_coreipctfmlevel2_config
diff --git a/tfm_ci_pylib/fastmodel_wrapper/config_templates.py b/tfm_ci_pylib/fastmodel_wrapper/config_templates.py
new file mode 100644
index 0000000..6c9c636
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/config_templates.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+
+""" config_templates.py:
+
+    Fastmodel configuration templates for the supported TF-M platforms. """
+
+from __future__ import print_function
+from copy import deepcopy
+from .fastmodel_wrapper_config import fpv_wrapper_config
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+# ===================  Template Classes ===================
+class template_cfg(fpv_wrapper_config):
+    """ Creates a skeleton template configuration that allows creation of
+    configuration variants which set the parameters of:
+    buildpath, config, platform, compiler, as well as the missing test params,
+    test_rex, test_cases, test_end_string """
+
+    _name = fpv_wrapper_config._name + "_%(platform)s_%(compiler)s_" + \
+        "%(config)s_%(build_type)s_%(bootloader)s"
+    # variant dictionary allows individual and targeted parameter modification
+    _vdict = {
+        "build_path": "%(build_path)s",
+        "variant_name_tpl": "%(variant_name_tpl)s",
+        "app_bin_path": "%(app_bin_path)s",
+        "app_bin": "%(app_bin)s",
+        "data_bin_path": "%(data_bin_path)s",
+        "data_bin": "%(data_bin)s",
+        "data_bin_offset": "%(data_bin_offset)s",
+        "config": "%(config)s",
+        "platform": "%(platform)s",
+        "compiler": "%(compiler)s",
+        "build_type": "%(build_type)s",
+        "bootloader": "%(bootloader)s"
+    }
+
+    _cfg = deepcopy(fpv_wrapper_config._cfg)
+    _cfg["directory"] = "FVP_MPS2"
+    _cfg["terminal_log"] = "terminal_%(variant_name_tpl)s.log"
+    _cfg["bin"] = "FVP_MPS2_AEMv8M"
+    _cfg["error_on_failed"] = False
+    _cfg["application"] = (
+        "cpu0=%(build_path)s/%(variant_name_tpl)s/" +
+        "%(app_bin_path)s/%(app_bin)s")
+    _cfg["data"] = (
+        "cpu0=%(build_path)s/%(variant_name_tpl)s/%(data_bin_path)s/" +
+        "%(data_bin)s@%(data_bin_offset)s")
+    _cfg["simlimit"] = "600"
+    _cfg["parameters"] = [
+        "fvp_mps2.platform_type=2",
+        "cpu0.baseline=0",
+        "cpu0.INITVTOR_S=0x10000000",
+        "cpu0.semihosting-enable=0",
+        "fvp_mps2.DISABLE_GATING=0",
+        "fvp_mps2.telnetterminal0.start_telnet=0",
+        "fvp_mps2.telnetterminal1.start_telnet=0",
+        "fvp_mps2.telnetterminal2.start_telnet=0",
+        "fvp_mps2.telnetterminal0.quiet=1",
+        "fvp_mps2.telnetterminal1.quiet=1",
+        "fvp_mps2.telnetterminal2.quiet=1",
+        "fvp_mps2.UART0.out_file=$TERM_FILE",
+        "fvp_mps2.UART0.unbuffered_output=1",
+        "fvp_mps2.UART0.shutdown_on_eot=1",
+        "fvp_mps2.mps2_visualisation.disable-visualisation=1"]
+
+
+class template_default_config(template_cfg):
+    """ Will automatically populate the required information for tfm
+    Default configuration testing. User still needs to set the
+    buildpath, platform, compiler variants """
+
+    _cfg = deepcopy(template_cfg._cfg)
+
+    _vdict = deepcopy(template_cfg._vdict)
+
+    # Set defaults across all variants
+    _vdict["build_path"] = "build-ci-all"
+    _vdict["app_bin_path"] = "install/outputs/fvp"
+    _vdict["data_bin_path"] = "install/outputs/fvp"
+    _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+        "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+    _vdict["config"] = "ConfigDefault"
+    _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # System supports two types of matching with
+    # test_case_id and result match group and only test_case_id
+    _cfg["test_rex"] = (r'\x1b\[1;34m\[Sec Thread\] '
+                        r'(?P<test_case_id>Secure image initializing!)\x1b\[0m'
+                        )
+
+    # test_case_id capture group should match test_cases entries
+    _cfg["test_cases"] = [
+        'Secure image initializing!',
+    ]
+    # Testing will stop if string is reached
+    _cfg["test_end_string"] = "Secure image initializing"
+    _cfg["simlimit"] = "120"
+
+class template_regression_config(template_cfg):
+    """ Will automatically populate the required information for tfm
+    Regression configuration testing. User still needs to set the
+    buildpath, platform, compiler variants """
+
+    _cfg = deepcopy(template_cfg._cfg)
+    _vdict = deepcopy(template_cfg._vdict)
+
+    # Set defaults across all variants
+    _vdict["build_path"] = "build-ci-all"
+    _vdict["app_bin_path"] = "install/outputs/fvp"
+    _vdict["data_bin_path"] = "install/outputs/fvp"
+    _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+        "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+    _vdict["config"] = "ConfigRegression"
+    _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # Populate the test cases
+    _cfg["test_rex"] = (r"[\x1b]\[37mTest suite '(?P<test_case_id>[^\n]+)'"
+                        r" has [\x1b]\[32m (?P<result>PASSED|FAILED)")
+    _cfg["test_cases"] = [
+        'PSA protected storage S interface tests (TFM_SST_TEST_2XXX)',
+        'PSA protected storage NS interface tests (TFM_SST_TEST_1XXX)',
+        'SST reliability tests (TFM_SST_TEST_3XXX)',
+        'Core non-secure positive tests (TFM_CORE_TEST_1XXX)',
+        'AuditLog non-secure interface test (TFM_AUDIT_TEST_1XXX)',
+        'Crypto non-secure interface test (TFM_CRYPTO_TEST_6XXX)',
+        'Initial Attestation Service '
+        'non-secure interface tests(TFM_ATTEST_TEST_2XXX)',
+        'Invert non-secure interface tests (TFM_INVERT_TEST_1XXX)',
+        'SST rollback protection tests (TFM_SST_TEST_4XXX)',
+        'Audit Logging secure interface test (TFM_AUDIT_TEST_1XXX)',
+        'Crypto secure interface tests (TFM_CRYPTO_TEST_5XXX)',
+        'Initial Attestation Service secure '
+        'interface tests(TFM_ATTEST_TEST_1XXX)',
+        'Invert secure interface tests (TFM_INVERT_TEST_1XXX)',
+    ]
+    _cfg["test_end_string"] = "End of Non-secure test suites"
+
+    _cfg["simlimit"] = "1200"
+
+
+class template_coreipc_config(template_cfg):
+    """ Will automatically populate the required information for tfm
+    coreipc configuration testing. User still needs to set the
+    buildpath, platform, compiler variants """
+
+    _cfg = deepcopy(template_cfg._cfg)
+
+    _vdict = deepcopy(template_cfg._vdict)
+
+    # Set defaults across all variants
+    _vdict["build_path"] = "build-ci-all"
+
+    _vdict["app_bin_path"] = "install/outputs/fvp"
+    _vdict["data_bin_path"] = "install/outputs/fvp"
+
+    _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+        "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+    _vdict["config"] = "ConfigCoreIPC"
+    _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # System supports two types of matching with
+    # test_case_id and result match group and only test_case_id
+    _cfg["test_rex"] = (r'\x1b\[1;34m\[Sec Thread\] '
+                        r'(?P<test_case_id>Secure image initializing!)\x1b\[0m'
+                        )
+
+    # test_case_id capture group should match test_cases entries
+    _cfg["test_cases"] = [
+        'Secure image initializing!',
+    ]
+    # Testing will stop if string is reached
+    _cfg["test_end_string"] = "Secure image initializing"
+    _cfg["simlimit"] = "1200"
+
+class template_coreipctfmlevel2_config(template_cfg):
+    """ Will automatically populate the required information for tfm
+    coreipc tfmlevel2 configuration testing. User still needs to set the
+    buildpath, platform, compiler variants """
+
+    _cfg = deepcopy(template_cfg._cfg)
+
+    _vdict = deepcopy(template_cfg._vdict)
+
+    # Set defaults across all variants
+    _vdict["build_path"] = "build-ci-all"
+
+    _vdict["app_bin_path"] = "install/outputs/fvp"
+    _vdict["data_bin_path"] = "install/outputs/fvp"
+
+    _vdict["variant_name_tpl"] = "%(platform)s_%(compiler)s_%(config)s_" + \
+        "%(build_type)s_%(bootloader)s"
+
+    # Modify the %(config)s parameter of the template
+    _vdict["config"] = "ConfigCoreIPCTfmLevel2"
+    _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+    # System supports two types of matching with
+    # test_case_id and result match group and only test_case_id
+    _cfg["test_rex"] = (r'\x1b\[1;34m\[Sec Thread\] '
+                        r'(?P<test_case_id>Secure image initializing!)\x1b\[0m'
+                        )
+
+    # test_case_id capture group should match test_cases entries
+    _cfg["test_cases"] = [
+        'Secure image initializing!',
+    ]
+    # Testing will stop if string is reached
+    _cfg["test_end_string"] = "Secure image initializing"
+    _cfg["simlimit"] = "1200"
diff --git a/tfm_ci_pylib/fastmodel_wrapper/fastmodel_config_map.py b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_config_map.py
new file mode 100644
index 0000000..1a58441
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_config_map.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python3
+
+""" fastmodel_config_map.py:
+
+    Uses the Python class inheritance model to generate modular and easily
+    scalable configuration models for the run_fpv module. Configuration data
+    is also combined with helper methods. If the file is run as a standalone
+    file, it can save json configuration files to disk if requested by the
+    --export directive """
+
+from __future__ import print_function
+from copy import deepcopy
+from pprint import pprint
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+class FastmodelConfigMap(object):
+
+    def __init__(self, enviroment, platform):
+        pass
+
+        self._platforms = [platform]
+        self._cfg_map = self.global_import(enviroment)
+        self._invalid = []
+
+    def add_invalid(self, invalid_tuple):
+        self._invalid.append(invalid_tuple)
+
+    def get_invalid(self):
+        return deepcopy(self._invalid)
+
+    def global_import(self, enviroment, classname="TfmFastModelConfig"):
+        """ Import modules with specified classname from enviroment
+        provided by caller """
+
+        # Select the imported modules with a __name__ attribute
+        ol = {nme: cfg for nme, cfg in enviroment.items()
+              if hasattr(cfg, '__name__')}
+
+        # Select those who match the classname
+        fcfg = {nme: cfg_obj for nme, cfg_obj
+                in ol.items() if cfg_obj .__name__ == classname}
+
+        return {self._platforms[0]: fcfg}
+
+    def __add__(self, obj_b):
+        """ Override addition operator """
+
+        # Create a new object of left hand operant for return
+        ret_obj = deepcopy(self)
+
+        # Get references to new class members
+        map_a = ret_obj._cfg_map
+        platforms_a = ret_obj._platforms
+        map_b = obj_b.get_object_map()
+        for platform, config in map_b.items():
+
+            if platform in map_a.keys():
+                for cfg_name, cfg_object in config.items():
+                    if cfg_name in map_a[platform].keys():
+                        print("Matching entrty name %s" % (cfg_name))
+                        print("Left operant entry: %s "
+                              "will be replaced by: %s" %
+                              (map_a[platform][cfg_name], cfg_object))
+                    map_a[platform][cfg_name] = cfg_object
+            else:
+                map_a[platform] = deepcopy(config)
+                platforms_a.append(platform)
+
+        return ret_obj
+
+    def _cmerge(self):
+        """ Join all the platform configs """
+
+        ret = {}
+        for entry in self._cfg_map.values():
+            for name, cfg in entry.items():
+                ret[name] = cfg
+        return ret
+
+    def get_object_map(self):
+        """ Returns the config map as objects """
+
+        return deepcopy(self._cfg_map)
+
+    def get_config_map(self):
+        """ Return a copy of the config map with the config objects rendered
+        as dictionaries """
+
+        ret_dict = deepcopy(self._cfg_map)
+        for platform, config in self._cfg_map.items():
+            for name, cfg_object in config.items():
+                ret_dict[platform][name] = cfg_object.get_config()
+        return ret_dict
+
+    def list(self):
+        """ Print a quick list of the contained platforms and
+         configuration names """
+
+        return list(self._cmerge().keys())
+
+    def print_list(self):
+        """ Print a quick list of the contained platforms and
+         configuration names """
+
+        for platform, config in self._cfg_map.items():
+            print("=========== Platform: %s ===========" % platform)
+            for name, cfg_object in config.items():
+                print(name)
+
+    def print(self):
+        """ Print the contents of a human readable config map """
+
+        pprint(self.get_config_map())
+
+    def get_config_object(self, config_name, platform=None):
+        try:
+            cfg_dict = self._cfg_map[platform]
+        except Exception as e:
+            cfg_dict = self._cmerge()
+
+        return cfg_dict[config_name]
+
+    def get_config(self, config_name, platform=None):
+
+        return self.get_config_object(config_name, platform).get_config()
+
+    def patch_config(self, cfg_name, key, new_data, platform=None):
+        """ Modify a configuration entry, and re-render the class """
+
+        cfg_object = self.get_config_object(cfg_name, platform)
+
+        # Skip the rebuild if the entry already holds the new data
+        if cfg_object.get_variant_metadata()[key] == new_data:
+            return
+        v_meta = cfg_object.get_variant_metadata()
+        v_meta[key] = new_data
+        cfg_object.set_variant_metadata(v_meta).rebuild()
+
+
+def fvp_config_object_change_path(cfg_object, new_path):
+    """ Change the common artifact storage path and update its
+    configuration """
diff --git a/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper.py b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper.py
new file mode 100755
index 0000000..7566c2e
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper.py
@@ -0,0 +1,553 @@
+#!/usr/bin/env python3
+
+""" fastmodel_wrapper.py:
+
+    Wraps around Fast models which will execute in headless mode
+    producing serial output to a defined log file. It will spawn two processes
+    and one thread to monitor the output of the simulation and end it when a
+    user defined condition is matched. It will perform a set of tests and will
+    change the script exit code based on the output of the test """
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+import os
+import re
+import sys
+import argparse
+from time import sleep
+from pprint import pprint
+from copy import deepcopy
+from threading import Thread
+from queue import Queue, Empty
+from subprocess import Popen, PIPE, STDOUT
+
+try:
+    from tfm_ci_pylib.utils import find_missing_files, \
+        detect_python3, test, check_pid_status, save_json, save_dict_json, \
+        load_json
+except ImportError:
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    sys.path.append(os.path.join(dir_path, "../"))
+    from tfm_ci_pylib.utils import find_missing_files, \
+        detect_python3, test, check_pid_status, save_json, save_dict_json, \
+        load_json
+
+
+class FastmodelWrapper(object):
+    """ Controlling Class that wraps around an ARM Fastmodel and controls
+    execution, adding regex flow controls, and headless testing """
+
+    def __init__(self,
+                 fvp_cfg=None,
+                 work_dir="./",
+                 fvp_dir=None,
+                 fvp_binary=None,
+                 fvp_app=None,
+                 fvp_boot=None,
+                 terminal_file=None,
+                 fvp_time_out=None,
+                 fvp_test_error=None):
+
+        # Required by other methods, always set working directory first
+        self.work_dir = os.path.abspath(work_dir)
+
+        # Load the configuration from object or file
+        self.config, self.name = self.load_config(fvp_cfg)
+
+        self.show_config()
+
+        # Print a header
+        ln = int((62 - len(self.name) + 1) / 2)
+        print("\n%s Running Test: %s %s\n" % ("#" * ln, self.name, "#" * ln))
+
+        # consume the configuration parameters not related to FPV
+        # Extract test cases
+        self.test_list = self.config.pop("test_cases")
+        self.test_end_string = self.config.pop("test_end_string")
+        self.test_rex = self.config.pop("test_rex")
+
+        # Command line arguments overrides
+        # When those arguments are provided they override config entries
+        f_dir = self.config.pop("directory")
+        if fvp_dir:
+            self.fvp_dir = os.path.abspath(fvp_dir)
+        else:
+            self.fvp_dir = os.path.abspath(f_dir)
+
+        ef = self.config.pop("error_on_failed")
+        if fvp_test_error:
+            self.fvp_test_error = fvp_test_error
+        else:
+            self.fvp_test_error = ef
+
+        tf = self.config.pop("terminal_log")
+        if terminal_file:
+            self.term_file = os.path.abspath(terminal_file)
+        else:
+            tf = os.path.join(self.work_dir, tf)
+            self.term_file = os.path.abspath(tf)
+
+        # Override config entries directly
+        if fvp_binary:
+            self.config["bin"] = fvp_binary
+
+        if fvp_boot:
+            if re.match(r'[\S]+.axf$', fvp_boot):
+                self.config["application"] = "cpu0=" +\
+                                             os.path.abspath(fvp_boot)
+            else:
+                print("Invalid bootloader %s. Expecting .axf file" % fvp_app)
+                sys.exit(1)
+
+        # Ensure that the firmware is copied at the appropriate memory region
+        # exact-match regex for future reference: r'^(?:cpu=)[\S]+.bin@0x10080000$'
+        #  TODO remove that when other platforms are added
+        if fvp_app:
+            if re.match(r'[\S]+.bin$', fvp_app):
+                self.config["data"] = "cpu0=" +\
+                                      os.path.abspath(fvp_app) +\
+                                      "@0x10080000"
+            else:
+                print("Invalid firmware %s. Expecting .bin file" % fvp_app)
+                sys.exit(1)
+
+        if fvp_time_out:
+            self.fvp_time_out = fvp_time_out
+            self.config["simlimit"] = fvp_time_out
+
+        self.monitor_q = Queue()
+        self.stop_all = False
+        self.pids = []
+        self.fvp_test_summary = False
+
+        # Asserted only after a complete test run,including end string matching
+        self.test_complete = False
+
+        self.test_report = None
+
+        # Change to working directory
+        os.chdir(self.work_dir)
+        print("Switching to working directory: %s" % self.work_dir)
+        # Clear the file if it has been created before
+        with open(self.term_file, "w") as F:
+            F.write("")
+
+    def show_config(self):
+        """ print the configuration to console """
+
+        print("\n%s config:\n" % self.name)
+        pprint(self.config)
+
+    def load_config(self, config):
+        """ Load the configuration from a json file or a memory map"""
+
+        try:
+            # If config is a dictionary object use it as is
+            if isinstance(config, dict):
+                ret_config = config
+            elif isinstance(config, str):
+                # if the file provided is not detected attempt to look for it
+                # in working directory
+                if not os.path.isfile(config):
+                    # remove path from file
+                    cfg_file_2 = os.path.split(config)[-1]
+                    # look in the current working directory
+                    cfg_file_2 = os.path.join(self.work_dir, cfg_file_2)
+                    if not os.path.isfile(cfg_file_2):
+                        m = "Could not find cfg in %s or %s " % (config,
+                                                                 cfg_file_2)
+                        raise Exception(m)
+                    # If the file exists in the working directory
+                    else:
+                        config = cfg_file_2
+                # Attempt to load the configuration from File
+                ret_config = load_json(config)
+            else:
+                raise Exception("Need to provide a valid config name or file."
+                                "Please use --config/--config-file parameter.")
+
+        except Exception as e:
+            print("Error! Could not load config. Quitting")
+            sys.exit(1)
+
+        # Generate Test name (Used in test report) from terminal file.
+        tname = ret_config["terminal_log"].replace("terminal_", "")\
+            .split(".")[0].lower()
+
+        return deepcopy(ret_config), tname
+
+    def save_config(self, config_file="fvp_tfm_config.json"):
+        """ Save the current configuration to a json file """
+
+        # Add stripped information to config. NOTE(review): exp_cfg is a plain dict; .get_sort_order() below would raise AttributeError — confirm intended type
+        exp_cfg = deepcopy(self.config)
+
+        exp_cfg["terminal_log"] = self.term_file
+        exp_cfg["error_on_failed"] = self.fvp_test_error
+        exp_cfg["directory"] = self.fvp_dir
+        exp_cfg["test_cases"] = self.test_list
+        exp_cfg["test_end_string"] = self.test_end_string
+        exp_cfg["test_rex"] = self.test_rex
+
+        cfg_f = os.path.join(self.work_dir, config_file)
+        save_dict_json(cfg_f, exp_cfg, exp_cfg.get_sort_order())
+        print("Configuration %s exported." % cfg_f)
+
+    def compile_cmd(self):
+        """ Compile all the FPV related information into a command that can
+        be executed manually """
+
+        cmd = ""
+        for name, value in self.config.items():
+            # Place the executable at the beginning of the command
+            if name == "bin":
+                cmd = value + cmd
+            elif name == "parameters":
+                cmd += " " + " ".join(["--parameter %s" % p for p in value])
+            # Allows setting a second binary file as data field
+            elif name == "application" and ".bin@0x0" in value:
+                cmd += " --data %s" % value
+            else:
+                cmd += " --%s %s" % (name, value)
+
+        # Add the path to the command
+        cmd = os.path.join(self.fvp_dir, cmd)
+
+        # Add the log file to the command (optional)
+        cmd = cmd.replace("$TERM_FILE", self.term_file)
+        return cmd
+
+    def show_cmd(self):
+        """ print the FPV command to console """
+
+        print(self.compile_cmd())
+
+    def run_fpv(self):
+        """ Run the Fast Model test in a different process and return
+        the pid for housekeeping purposes """
+
+        def fpv_stdout_parser(dstream, queue):
+            """ THREAD: Read STDOUT/STDERR and stop if process is done """
+
+            for line in iter(dstream.readline, b''):
+                if self.stop_all:
+                    break
+                else:
+                    # Python2 ignores byte literals, P3 requires parsing
+                    if detect_python3():
+                        line = line.decode("utf-8")
+                if "Info: /OSCI/SystemC: Simulation stopped by user" in line:
+                    print("/OSCI/SystemC: Simulation stopped")
+                    self.stop()
+                    break
+
+        # Convert to list
+        cmd = self.compile_cmd().split(" ")
+
+        # Run it as subproccess
+        self.fvp_proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=False)
+        self._fvp_thread = Thread(target=fpv_stdout_parser,
+                                  args=(self.fvp_proc.stdout,
+                                        self.monitor_q))
+        self._fvp_thread.daemon = True
+        self._fvp_thread.start()
+        return self.fvp_proc.pid
+
+    def run_monitor(self):
+        """ Run a parallel threaded process that monitors the output of
+        the FPV and stops it when a user specified string is found.
+        It returns the pid of the process for housekeeping """
+
+        def monitor_producer(dstream, queue):
+            """ THREAD: Read STDOUT and push data into a queue """
+
+            for line in iter(dstream.readline, b''):
+                if self.stop_all:
+                    break
+                else:
+                    # Python2 ignores byte literals, P3 requires parsing
+                    if detect_python3():
+                        line = line.decode("utf-8")
+
+                    queue.put(line)
+
+                # If the text end string is found terminate
+                if self.test_end_string in str(line):
+
+                    queue.put("Found End String \"%s\"" % self.test_end_string)
+                    self.test_complete = True
+                    self.stop()
+                    break
+                # If the FPV stops by itself (i.e. simlimit reached) terminate
+                if "SystemC: Simulation stopped by user" in str(line):
+
+                    queue.put("Simulation Ended \"%s\"" % self.test_end_string)
+                    self.stop()
+                    break
+
+            dstream.close()
+            return
+
+        # Run the tail as a separate proccess
+        cmd = ["tail", "-f", self.term_file]
+        self.monitor_proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=False)
+
+        self._fvp_mon_thread = Thread(target=monitor_producer,
+                                      args=(self.monitor_proc.stdout,
+                                            self.monitor_q))
+        self._fvp_mon_thread.daemon = True
+        self._fvp_mon_thread.start()
+        return self.monitor_proc.pid
+
+    def monitor_consumer(self):
+        """ Read the output of the monitor thread and print the queue entries
+        one entry at a time (One line per call) """
+        try:
+            line = self.monitor_q.get_nowait()
+        except Empty:
+            pass
+        else:
+            print(line.rstrip())
+
+    def has_stopped(self):
+        """ Return status of the stop flag. True indicates stopped state """
+
+        return self.stop_all
+
+    def start(self):
+        """ Start the FPV and the monitor processes and keep
+        track of their pids"""
+
+        # Do not spawn the FVP unless every required binary is in place
+        bin_list = [os.path.join(self.fvp_dir, self.config["bin"]),
+                    self.config["application"].replace("cpu0=", "")
+                                              .replace("@0x0", ""),
+                    self.config["data"].replace("@0x10080000", "")
+                                       .replace("@0x00100000", "")
+                                       .replace("cpu0=", "")]
+
+        if find_missing_files(bin_list):
+            print("Could not find all binaries from %s" % ", ".join(bin_list))
+            print("Missing Files:", ", ".join(find_missing_files(bin_list)))
+            sys.exit(1)
+
+        self.pids.append(self.run_fpv())
+        self.pids.append(self.run_monitor())
+        print("Spawned Proccesses with PID %s" % repr(self.pids)[1:-1])
+        return self
+
+    def stop(self):
+        """ Stop all threads, processes and make sure there are no leaks """
+
+        self.stop_all = True
+
+        # Send the graceful shutdown signal
+        self.monitor_proc.terminate()
+        self.fvp_proc.terminate()
+        sleep(1)
+        # List the Zombies
+        # TODO remove debug output
+        for pid in sorted(self.pids):
+            if check_pid_status(pid, ["zombie", ]):
+                pass
+                # print("Warning. Defunc proccess %s" % pid)
+
+    def test(self):
+        """ Parse the output terminal file and evaluate status of tests """
+
+        # read the output file
+        with open(self.term_file, "r") as F:
+            terminal_log = F.read()
+
+        pass_text = "PASSED"
+        # create a filtering regex
+        rex = re.compile(self.test_rex)
+
+        # Extract tests status as a tuple list
+        tests = rex.findall(terminal_log)
+
+        try:
+            if isinstance(tests, list):
+                if len(tests):
+                    # when test regex is  in format [(test_name, RESULT),...]
+                    if isinstance(tests[0], tuple):
+                        # Convert result into a dictionary
+                        tests = dict(zip(*list(zip(*tests))))
+                    # when regex is  in format [(test_name, test_name 2),...]
+                    # we just need to verify they exist
+                    elif isinstance(tests[0], str):
+                        pass_text = "PRESENT"
+                        tests = dict(zip(tests,
+                                     [pass_text for n in range(len(tests))]))
+                    else:
+                        raise Exception("Incompatible Test Format")
+                else:
+                    raise Exception("Incompatible Test Format")
+            else:
+                raise Exception("Incompatible Test Format")
+        except Exception:
+
+            if not self.test_complete:
+                print("Warning! Test did not complete.")
+            else:
+                print("Error", "Invalid tests format: %s type: %s" %
+                      (tests, type(tests)))
+            # Pass an empty output to test. Do not exit prematurely
+            tests = {}
+
+        # Run the test and store the report
+        self.test_report = test(self.test_list,
+                                tests,
+                                pass_text=pass_text,
+                                test_name=self.name,
+                                error_on_failed=self.fvp_test_error,
+                                summary=self.fvp_test_summary)
+        return self
+
+    def get_report(self):
+        """ Return the test report object to caller """
+
+        if not self.test_report:
+            raise Exception("Can not create report from incomplete run cycle!")
+        return self.test_report
+
+    def save_report(self, rep_f=None):
+        """ Export report into a file, set by test name but can be overidden by
+        rep_file"""
+
+        if not self.stop_all or not self.test_report:
+            print("Can not create report from incomplete run cycle!")
+            return
+
+        if not rep_f:
+            rep_f = os.path.join(self.work_dir, "report_%s.json" % self.name)
+            rep_f = os.path.abspath(rep_f)
+        save_json(rep_f, self.test_report)
+        print("Exported test report: %s" % rep_f)
+        return self
+
+    def block_wait(self):
+        """ Block execution flow and wait for the monitor to complete.
+        Stops all children if any pid leaves a live state, or on
+        user interrupt (Ctrl-C) """
+        try:
+            while True:
+                # Poll every spawned pid; a dead child triggers full shutdown
+                for pid in sorted(self.pids):
+
+                    if not check_pid_status(pid, ["running",
+                                                  "sleeping",
+                                                  "disk"]):
+                        print("Child proccess of pid: %s has died, exitting!" %
+                              pid)
+                        self.stop()
+                if self.has_stopped():
+                    break
+                else:
+                    # Keep draining the monitor queue while still running
+                    self.monitor_consumer()
+
+        except KeyboardInterrupt:
+            print("User initiated interrupt")
+            self.stop()
+        # Allows method to be chainloaded
+        return self
+
+
+def get_cmd_args():
+    """ Parse command line arguments """
+
+    # Parse command line arguments to override config
+    parser = argparse.ArgumentParser(description="TFM Fastmodel wrapper.")
+    parser.add_argument("--bin",
+                        dest="fvp_bin",
+                        action="store",
+                        help="Fast Model platform binary file")
+    parser.add_argument("--firmware",
+                        dest="fvp_firm",
+                        action="store",
+                        help="Firmware application file to run")
+    parser.add_argument("--boot",
+                        dest="fvp_boot",
+                        action="store",
+                        help="Fast Model bootloader file")
+    parser.add_argument("--fpv-path",
+                        dest="fvp_dir",
+                        action="store",
+                        help="Directory path containing the Fast Models")
+    parser.add_argument("--work-path",
+                        dest="work_dir", action="store",
+                        default="./",
+                        help="Working directory (Where logs are stored)")
+    parser.add_argument("--time-limit",
+                        dest="time", action="store",
+                        help="Time in seconds to run the simulation")
+    parser.add_argument("--log-file",
+                        dest="termf",
+                        action="store",
+                        help="Set terminal log file name")
+    parser.add_argument("--error",
+                        dest="test_err",
+                        action="store",
+                        help="raise sys.error = 1 if test failed")
+    parser.add_argument("--config-file",
+                        dest="config_file",
+                        action="store",
+                        help="Path of configuration file")
+    parser.add_argument("--print-config",
+                        dest="p_config",
+                        action="store_true",
+                        help="Print the configuration to console")
+    parser.add_argument("--print-command",
+                        dest="p_command",
+                        action="store_true",
+                        help="Print the FPV launch command to console")
+    return parser.parse_args()
+
+
+def main(user_args):
+    """ Main logic """
+
+    # Create FPV handler
+    F = FastmodelWrapper(fvp_cfg=user_args.config_file,
+                         work_dir=user_args.work_dir,
+                         fvp_dir=user_args.fvp_dir,
+                         fvp_binary=user_args.fvp_bin,
+                         fvp_boot=user_args.fvp_boot,
+                         fvp_app=user_args.fvp_firm,
+                         terminal_file=user_args.termf,
+                         fvp_time_out=user_args.time,
+                         fvp_test_error=user_args.test_err)
+
+    if user_args.p_config:
+        F.show_config()
+        sys.exit(0)
+
+    if user_args.p_command:
+        F.show_cmd()
+        sys.exit(0)
+
+    # Start the wrapper
+    F.start()
+
+    # Wait for the wrapper to complete
+    F.block_wait()
+
+    print("Shutting Down")
+    # Test the output of the system only after a full execution
+    if F.test_complete:
+        F.test()
+
+
+if __name__ == "__main__":
+    # Script entry point: parse the CLI arguments and run the wrapper
+    main(get_cmd_args())
diff --git a/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper_config.py b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper_config.py
new file mode 100644
index 0000000..0c2f60a
--- /dev/null
+++ b/tfm_ci_pylib/fastmodel_wrapper/fastmodel_wrapper_config.py
@@ -0,0 +1,267 @@
+#!/usr/bin/env python3
+
+""" fastmodel_wrapper_config.py:
+
+    Using the Python class inheritance model to generate modular, easy-to-scale
+    configuration models for the run_fpv module. Configuration data is also
+    combined with helper methods. If the file is run as a standalone file,
+    it can save json configuration files to disk if requested by --export
+    directive """
+
+from __future__ import print_function
+from collections import OrderedDict
+from copy import deepcopy
+from pprint import pprint
+
+__copyright__ = """
+/*
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+__author__ = "Minos Galanakis"
+__email__ = "minos.galanakis@linaro.org"
+__project__ = "Trusted Firmware-M Open CI"
+__status__ = "stable"
+__version__ = "1.1"
+
+
+try:
+    from tfm_ci_pylib.utils import save_dict_json
+except ImportError:
+    # Fallback for standalone execution: make the package parent directory
+    # importable before retrying the import
+    import os
+    import sys
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    sys.path.append(os.path.join(dir_path, "../"))
+    from tfm_ci_pylib.utils import save_dict_json
+
+
+# Used in fixed sorting of configuration before generating a json file
+# WARNING: modification of this list will fundamentally change behavior
+config_sort_order = [
+    "directory",
+    "terminal_log",
+    "bin",
+    "error_on_failed",
+    "test_rex", "test_cases",
+    "test_end_string",
+    "application",
+    "data",
+    "simlimit",
+    "parameters"
+]
+
+
+class fpv_wrapper_config(object):
+    """ Controlling Class that wraps around an ARM Fastmodel and controls
+    execution, adding regex flow controls, and headless testing """
+
+    # Ensure the dictionary entries are sorted.
+    # NOTE(review): _cfg is a class attribute; __init__ assigns into it
+    # through self, which mutates this shared dict -- confirm instances
+    # are not expected to hold independent configurations
+    _cfg = OrderedDict.fromkeys(config_sort_order)
+    _name = "run_fpv"
+
+    def __init__(self,
+                 fvp_dir,
+                 terminal_file,
+                 fvp_binary,
+                 eof,
+                 test_rex,
+                 test_cases,
+                 test_end_string,
+                 fvp_app,
+                 fvp_boot,
+                 fvp_sim_limit,
+                 params):
+        """ Populate the configuration dictionary from the individual
+        parameters (fastmodel directory, binaries, test directives,
+        simulation limit and extra launch parameters) """
+
+        self._cfg["directory"] = fvp_dir
+        self._cfg["terminal_log"] = terminal_file
+        self._cfg["bin"] = fvp_binary
+        # eof: error_on_failed flag
+        self._cfg["error_on_failed"] = eof
+        self._cfg["test_rex"] = test_rex
+        self._cfg["test_cases"] = test_cases
+        self._cfg["test_end_string"] = test_end_string
+        self._cfg["application"] = fvp_app
+        self._cfg["data"] = fvp_boot
+        self._cfg["simlimit"] = fvp_sim_limit
+        self._cfg["parameters"] = params
+
+    # NOTE(review): the classmethods below name the class parameter "self"
+    # where "cls" is the convention; behavior is unaffected
+    @classmethod
+    def get_config(self):
+        """ Return a copy of the fastmodel configuration dictionary """
+        return dict(deepcopy(self._cfg))
+
+    @classmethod
+    def get_variant_metadata(self):
+        """ Return a copy of the class generator variant dictionary.
+        _vdict is supplied by subclasses (see template_cfg); calling this
+        on the base class raises AttributeError """
+        return deepcopy(self._vdict)
+
+    @classmethod
+    def set_variant_metadata(self, vdict):
+        """ Replace the metadata dictionary with user provided one.
+        Returns the class to allow chaining """
+
+        self._vdict = deepcopy(vdict)
+
+        return self
+
+    @classmethod
+    def querry_variant_metadata(self, key, value):
+        """ Verify that metadata dictionary contains value for key entry.
+        ("querry" spelling kept: renaming would break existing callers) """
+
+        return self._vdict[key] == value
+
+    @classmethod
+    def rebuild(self):
+        """ Recreate the configuration of a class after metadata has been
+        modified. Relies on _tpl_cfg, which is set on classes produced by
+        the config_variant decorator defined later in this module """
+
+        # Reset the configuration entries to the stock ones
+        self._cfg = deepcopy(self._tpl_cfg)
+
+        # recreate a temporary class with proper configuration
+        @config_variant(**self._vdict)
+        class tmp_class(self):
+            pass
+
+        # Copy over the new configuration from the temporary class
+        self._cfg = deepcopy(tmp_class._cfg)
+
+    @classmethod
+    def print(self):
+        """ Print the configuration dictionary in a human readable format """
+        pprint(dict(self._cfg))
+
+    @classmethod
+    def json_to_file(self, outfile=None):
+        """ Create a JSON file with the configuration. The file name
+        defaults to <config name>.json when outfile is not provided """
+
+        if not outfile:
+            outfile = self.get_name() + ".json"
+        save_dict_json(outfile, self.get_config(), config_sort_order)
+        print("Configuration exported to %s" % outfile)
+
+    @classmethod
+    def get_name(self):
+        """ Return the name of the configuration """
+
+        return self._name.lower()
+
+    # NOTE(review): unlike the rest of the API this is an instance method
+    def get_sort_order(self):
+        """ Return an ordered list of entries in the configuration """
+
+        return self._cfg.keys()
+
+
+def config_variant(**override_params):
+    """ Class decorator that enables dynamic subclass creation for different
+    configuration combinations. Override params can be any keyword based
+    argument of template_cfg._vdict """
+
+    def class_rebuilder(cls):
+        class TfmFastModelConfig(cls):
+            override = False
+            _cfg = deepcopy(cls._cfg)
+            # Pristine template copy so rebuild() can reset _cfg later
+            _tpl_cfg = deepcopy(cls._cfg)
+            _vdict = deepcopy(cls._vdict)
+            # Apply only the overrides matching known variant keys
+            for param, value in override_params.items():
+                if param in _vdict.keys():
+                    _vdict[param] = value
+                    override = True
+
+            if override:
+                # Resolve the variant name template against the metadata
+                _vdict["variant_name_tpl"] = _vdict["variant_name_tpl"] \
+                    % _vdict
+
+                # Update the configuration dependent entries
+                _cfg["terminal_log"] = _cfg["terminal_log"] % _vdict
+
+                # Adjust the binaries based on bootloader presence
+                if _vdict["bootloader"] == "BL2":
+                    _vdict["app_bin"] = override_params["app_bin"] if \
+                        "app_bin" in override_params else "mcuboot.axf"
+                    _vdict["data_bin"] = override_params["data_bin"] if \
+                        "data_bin" in override_params \
+                        else "tfm_s_ns_signed.bin"
+                    _vdict["data_bin_offset"] = "0x10080000"
+                else:
+                    _vdict["app_bin"] = override_params["app_bin"] if \
+                        "app_bin" in override_params else "tfm_s.axf"
+                    _vdict["data_bin"] = override_params["data_bin"] if \
+                        "data_bin" in override_params else "tfm_ns.bin"
+                    _vdict["data_bin_offset"] = "0x00100000"
+
+                # Switching from AN519 requires changing the parameter
+                # cpu0.baseline=0 -> 1
+                if _vdict["platform"] == "AN519":
+                    idx = _cfg["parameters"].index("cpu0.baseline=0")
+                    cpu_param = _cfg["parameters"].pop(idx).replace("=0", "=1")
+                    _cfg["parameters"].append(cpu_param)
+                _cfg["application"] = _cfg["application"] % _vdict
+                _cfg["data"] = _cfg["data"] % _vdict
+
+                # Class-body assignment: sets the generated class' _name
+                _name = cls._name % _vdict
+
+        return TfmFastModelConfig
+
+    return class_rebuilder
+
+
+# ===================  Template Classes ===================
+class template_cfg(fpv_wrapper_config):
+    """ Creates a skeleton template configuration that allows creation of
+    configuration variants which set the parameters of:
+    buildpath, config, platform, compiler , as well as the missing test params,
+    test_rex, test_cases, test_end_string """
+
+    _name = fpv_wrapper_config._name + "_%(platform)s_%(compiler)s_" + \
+        "%(config)s_%(build_type)s_%(bootloader)s"
+    # variant dictionary allows individual and targeted parameter modification
+    # Each "%(key)s" value is a placeholder resolved by the config_variant
+    # decorator once the variant metadata is known
+    _vdict = {
+        "build_path": "%(build_path)s",
+        "variant_name_tpl": "%(variant_name_tpl)s",
+        "app_bin_path": "%(app_bin_path)s",
+        "app_bin": "%(app_bin)s",
+        "data_bin_path": "%(data_bin_path)s",
+        "data_bin": "%(data_bin)s",
+        "data_bin_offset": "%(data_bin_offset)s",
+        "config": "%(config)s",
+        "platform": "%(platform)s",
+        "compiler": "%(compiler)s",
+        "build_type": "%(build_type)s",
+        "bootloader": "%(bootloader)s"
+    }
+
+    # Stock configuration entries shared by all variants
+    _cfg = deepcopy(fpv_wrapper_config._cfg)
+    _cfg["directory"] = "FVP_MPS2_11.3"
+    _cfg["terminal_log"] = "terminal_%(variant_name_tpl)s.log"
+    _cfg["bin"] = "FVP_MPS2_AEMv8M"
+    _cfg["error_on_failed"] = False
+    _cfg["application"] = (
+        "cpu0=%(build_path)s/%(variant_name_tpl)s/" +
+        "%(app_bin_path)s/%(app_bin)s")
+    _cfg["data"] = (
+        "cpu0=%(build_path)s/%(variant_name_tpl)s/%(data_bin_path)s/" +
+        "%(data_bin)s@%(data_bin_offset)s")
+    _cfg["simlimit"] = "60"
+    # Fast Model launch parameters (telnet terminals silenced, UART0
+    # redirected to the $TERM_FILE log)
+    _cfg["parameters"] = [
+        "fvp_mps2.platform_type=2",
+        "cpu0.baseline=0",
+        "cpu0.INITVTOR_S=0x10000000",
+        "cpu0.semihosting-enable=0",
+        "fvp_mps2.DISABLE_GATING=0",
+        "fvp_mps2.telnetterminal0.start_telnet=0",
+        "fvp_mps2.telnetterminal1.start_telnet=0",
+        "fvp_mps2.telnetterminal2.start_telnet=0",
+        "fvp_mps2.telnetterminal0.quiet=1",
+        "fvp_mps2.telnetterminal1.quiet=1",
+        "fvp_mps2.telnetterminal2.quiet=1",
+        "fvp_mps2.UART0.out_file=$TERM_FILE",
+        "fvp_mps2.UART0.unbuffered_output=1",
+        "fvp_mps2.UART0.shutdown_on_eot=1",
+        "fvp_mps2.mps2_visualisation.disable-visualisation=1"]
+
+
+if __name__ == "__main__":
+    # Create Json configuration files on user request
+    # TODO(review): the --export handling described in the module docstring
+    # is not implemented yet -- this entry point is currently a no-op
+    pass
diff --git a/tfm_ci_pylib/lava_rpc_connector.py b/tfm_ci_pylib/lava_rpc_connector.py
index 269cbbf..885dc4a 100644
--- a/tfm_ci_pylib/lava_rpc_connector.py
+++ b/tfm_ci_pylib/lava_rpc_connector.py
@@ -19,7 +19,7 @@
 __email__ = "minos.galanakis@linaro.org"
 __project__ = "Trusted Firmware-M Open CI"
 __status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
 
 import xmlrpc.client
 import time
diff --git a/tfm_ci_pylib/structured_task.py b/tfm_ci_pylib/structured_task.py
index b97cae9..1a2f45f 100644
--- a/tfm_ci_pylib/structured_task.py
+++ b/tfm_ci_pylib/structured_task.py
@@ -19,7 +19,7 @@
 __email__ = "minos.galanakis@linaro.org"
 __project__ = "Trusted Firmware-M Open CI"
 __status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
 
 import abc
 import time
@@ -129,7 +129,7 @@
 
     def _t_stop(self):
         """ Internal class stop to be called through thread """
-        print("Thead is alive0 %s" % self.is_alive())
+
         if(self.is_alive()):
             print("%s =========> STOP" % self.get_name())
             self._stopevent.set()
diff --git a/tfm_ci_pylib/tfm_build_manager.py b/tfm_ci_pylib/tfm_build_manager.py
index dcf75de..0849a4b 100644
--- a/tfm_ci_pylib/tfm_build_manager.py
+++ b/tfm_ci_pylib/tfm_build_manager.py
@@ -18,14 +18,15 @@
 __email__ = "minos.galanakis@linaro.org"
 __project__ = "Trusted Firmware-M Open CI"
 __status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
 
 import os
 import sys
-from pprint import pprint
+from time import time
 from copy import deepcopy
 from .utils import gen_cfg_combinations, list_chunks, load_json,\
-    save_json, print_test
+    save_json, print_test, show_progress, \
+    resolve_rel_path
 from .structured_task import structuredTask
 from .tfm_builder import TFM_Builder
 
@@ -44,31 +45,28 @@
                             #               "CMAKE_BUILD_TYPE": "Debug"}
                  report=None,        # File to produce report
                  parallel_builds=3,  # Number of builds to run in parallel
-                 build_threads=4,    # Number of threads used per build
-                 markdown=True,      # Create markdown report
-                 html=True,          # Create html report
-                 ret_code=True,      # Set ret_code of script if build failed
-                 install=False):     # Install libraries after build
-
+                 build_threads=3,    # Number of threads used per build
+                 install=False,      # Install libraries after build
+                 img_sizes=False,    # Use arm-none-eabi-size for size info
+                 relative_paths=False):     # Store relative paths in report
         self._tbm_build_threads = build_threads
         self._tbm_conc_builds = parallel_builds
         self._tbm_install = install
-        self._tbm_markdown = markdown
-        self._tbm_html = html
-        self._tbm_ret_code = ret_code
+        self._tbm_img_sizes = img_sizes
+        self._tbm_relative_paths = relative_paths
 
         # Required by other methods, always set working directory first
         self._tbm_work_dir = os.path.abspath(os.path.expanduser(work_dir))
 
         self._tbm_tfm_dir = os.path.abspath(os.path.expanduser(tfm_dir))
 
-        # Entries will be filled after sanity test on cfg_dict dring pre_exec
-        self._tbm_build_dir = None
+        # Internal flag to tag simple (non combination formatted configs)
+        self.simple_config = False
         self._tbm_report = report
 
-        # TODO move them to pre_eval
         self._tbm_cfg = self.load_config(cfg_dict, self._tbm_work_dir)
-        self._tbm_build_cfg_list = self.parse_config(self._tbm_cfg)
+        self._tbm_build_cfg, \
+            self.tbm_common_cfg = self.parse_config(self._tbm_cfg)
 
         super(TFM_Build_Manager, self).__init__(name="TFM_Build_Manager")
 
@@ -79,27 +77,125 @@
     def pre_exec(self, eval_ret):
         """ """
 
+    def override_tbm_cfg_params(self, config, override_keys, **params):
+        """ Using a dictionay as input, for each key defined in
+        override_keys it will replace the config[key] entries with
+        the key=value parameters provided """
+
+        for key in override_keys:
+            if isinstance(config[key], list):
+                config[key] = [n % params for n in config[key]]
+            elif isinstance(config[key], str):
+                config[key] = config[key] % params
+            else:
+                raise Exception("Config does not contain key %s "
+                                "of type %s" % (key, config[key]))
+        return config
+
     def task_exec(self):
         """ Create a build pool and execute them in parallel """
 
         build_pool = []
-        for i in self._tbm_build_cfg_list:
 
-            name = "%s_%s_%s_%s_%s" % (i.TARGET_PLATFORM,
-                                       i.COMPILER,
-                                       i.PROJ_CONFIG,
-                                       i.CMAKE_BUILD_TYPE,
-                                       "BL2" if i.WITH_MCUBOOT else "NOBL2")
+        # When a config is flagged as a single build config.
+        # Name is evaluated by config type
+        if self.simple_config:
+
+            build_cfg = deepcopy(self.tbm_common_cfg)
+
+            # Extract the common for all elements of config
+            for key in ["build_cmds", "required_artefacts"]:
+                try:
+                    build_cfg[key] = build_cfg[key]["all"]
+                except KeyError:
+                    build_cfg[key] = []
+            name = build_cfg["config_type"]
+
+            # Override _tbm_xxx paths in commands
+            # plafrom in not guaranteed without seeds so _tbm_target_platform
+            # is ignored
+            over_dict = {"_tbm_build_dir_": os.path.join(self._tbm_work_dir,
+                                                         name),
+                         "_tbm_code_dir_": build_cfg["codebase_root_dir"]}
+
+            build_cfg = self.override_tbm_cfg_params(build_cfg,
+                                                     ["build_cmds",
+                                                      "required_artefacts",
+                                                      "artifact_capture_rex"],
+                                                     **over_dict)
+
+            # Overrides path in expected artefacts
             print("Loading config %s" % name)
-            build_pool.append(TFM_Builder(name,
-                              self._tbm_tfm_dir,
-                              self._tbm_work_dir,
-                              dict(i._asdict()),
-                              self._tbm_install,
-                              self._tbm_build_threads))
+
+            build_pool.append(TFM_Builder(
+                              name=name,
+                              work_dir=self._tbm_work_dir,
+                              cfg_dict=build_cfg,
+                              build_threads=self._tbm_build_threads,
+                              img_sizes=self._tbm_img_sizes,
+                              relative_paths=self._tbm_relative_paths))
+        # When a seed pool is provided iterate through the entries
+        # and update platform spefific parameters
+        elif len(self._tbm_build_cfg):
+
+            for name, i in self._tbm_build_cfg.items():
+                # Do not modify the original config
+                build_cfg = deepcopy(self.tbm_common_cfg)
+
+                # Extract the common for all elements of config
+                for key in ["build_cmds", "required_artefacts"]:
+                    try:
+                        build_cfg[key] = deepcopy(self.tbm_common_cfg[key]
+                                                  ["all"])
+                    except KeyError as E:
+                        build_cfg[key] = []
+
+                # Extract the platform specific elements of config
+                for key in ["build_cmds", "required_artefacts"]:
+                    try:
+                        if i.target_platform in self.tbm_common_cfg[key].keys():
+                            build_cfg[key] += deepcopy(self.tbm_common_cfg[key]
+                                                       [i.target_platform])
+                    except Exception as E:
+                        pass
+
+                # Merge the two dictionaries since the template may contain
+                # fixed and combinations seed parameters
+                cmd0 = build_cfg["config_template"] % \
+                    {**dict(i._asdict()), **build_cfg}
+
+                # Prepend configuration commoand as the first cmd
+                build_cfg["build_cmds"] = [cmd0] + build_cfg["build_cmds"]
+
+                # Set the overrid params
+                over_dict = {"_tbm_build_dir_": os.path.join(
+                    self._tbm_work_dir, name),
+                    "_tbm_code_dir_": build_cfg["codebase_root_dir"],
+                    "_tbm_target_platform_": i.target_platform}
+
+                over_params = ["build_cmds",
+                               "required_artefacts",
+                               "artifact_capture_rex"]
+                build_cfg = self.override_tbm_cfg_params(build_cfg,
+                                                         over_params,
+                                                         **over_dict)
+
+                # Overrides path in expected artefacts
+                print("Loading config %s" % name)
+
+                build_pool.append(TFM_Builder(
+                                  name=name,
+                                  work_dir=self._tbm_work_dir,
+                                  cfg_dict=build_cfg,
+                                  build_threads=self._tbm_build_threads,
+                                  img_sizes=self._tbm_img_sizes,
+                                  relative_paths=self._tbm_relative_paths))
+        else:
+            print("Could not find any configuration. Check the rejection list")
 
         status_rep = {}
-        full_rep = {}
+        build_rep = {}
+        completed_build_count = 0
         print("Build: Running %d parallel build jobs" % self._tbm_conc_builds)
         for build_pool_slice in list_chunks(build_pool, self._tbm_conc_builds):
 
@@ -118,11 +214,26 @@
                 # Similarly print the logs of the other builds as they complete
                 if build_pool_slice.index(build) != 0:
                     build.log()
+                completed_build_count += 1
                 print("Build: Finished %s" % build.get_name())
+                print("Build Progress:")
+                show_progress(completed_build_count, len(build_pool))
 
                 # Store status in report
                 status_rep[build.get_name()] = build.get_status()
-                full_rep[build.get_name()] = build.report()
+                build_rep[build.get_name()] = build.report()
+
+        # Include the original input configuration in the report
+
+        metadata = {"input_build_cfg": self._tbm_cfg,
+                    "build_dir": self._tbm_work_dir
+                    if not self._tbm_relative_paths
+                    else resolve_rel_path(self._tbm_work_dir),
+                    "time": time()}
+
+        full_rep = {"report": build_rep,
+                    "_metadata_": metadata}
+
         # Store the report
         self.stash("Build Status", status_rep)
         self.stash("Build Report", full_rep)
@@ -134,7 +245,10 @@
     def post_eval(self):
         """ If a single build failed fail the test """
         try:
-            retcode_sum = sum(self.unstash("Build Status").values())
+            status_dict = self.unstash("Build Status")
+            if not status_dict:
+                raise Exception()
+            retcode_sum = sum(status_dict.values())
             if retcode_sum != 0:
                 raise Exception()
             return True
@@ -156,30 +270,6 @@
         """ Expose the internal report to a new object for external classes """
         return deepcopy(self.unstash("Build Report"))
 
-    def print_summary(self):
-        """ Print an comprehensive list of the build jobs with their status """
-
-        full_rep = self.unstash("Build Report")
-
-        # Filter out build jobs based on status
-        fl = ([k for k, v in full_rep.items() if v['status'] == 'Failed'])
-        ps = ([k for k, v in full_rep.items() if v['status'] == 'Success'])
-
-        print_test(t_list=fl, status="failed", tname="Builds")
-        print_test(t_list=ps, status="passed", tname="Builds")
-
-    def gen_cfg_comb(self, platform_l, compiler_l, config_l, build_l, boot_l):
-        """ Generate all possible configuration combinations from a group of
-        lists of compiler options"""
-        return gen_cfg_combinations("TFM_Build_CFG",
-                                    ("TARGET_PLATFORM COMPILER PROJ_CONFIG"
-                                     " CMAKE_BUILD_TYPE WITH_MCUBOOT"),
-                                    platform_l,
-                                    compiler_l,
-                                    config_l,
-                                    build_l,
-                                    boot_l)
-
     def load_config(self, config, work_dir):
         try:
             # passing config_name param supersseeds fileparam
@@ -209,52 +299,147 @@
             print("Error:%s \nCould not load a valid config" % e)
             sys.exit(1)
 
-        pprint(ret_cfg)
         return ret_cfg
 
     def parse_config(self, cfg):
         """ Parse a valid configuration file into a set of build dicts """
 
-        # Generate a list of all possible confugration combinations
-        full_cfg = self.gen_cfg_comb(cfg["platform"],
-                                     cfg["compiler"],
-                                     cfg["config"],
-                                     cfg["build"],
-                                     cfg["with_mcuboot"])
+        ret_cfg = {}
 
-        # Generate a list of all invalid combinations
-        rejection_cfg = []
+        # Config entries which are not subject to changes during combinations
+        static_cfg = cfg["common_params"]
 
-        for k in cfg["invalid"]:
-            # Pad the omitted values with wildcard char *
-            res_list = list(k) + ["*"] * (5 - len(k))
+        # Convert the codebase path to an absolute path
+        abs_code_dir = static_cfg["codebase_root_dir"]
+        abs_code_dir = os.path.abspath(os.path.expanduser(abs_code_dir))
+        static_cfg["codebase_root_dir"] = abs_code_dir
 
-            print("Working on rejection input: %s" % (res_list))
+        # seed_params is an optional field. Do not process if it is missing
+        if "seed_params" in cfg:
+            comb_cfg = cfg["seed_params"]
+            # Generate a list of all possible configuration combinations
+            ret_cfg = TFM_Build_Manager.generate_config_list(comb_cfg,
+                                                             static_cfg)
 
-            # Key order matters. Use index to retrieve default values When
-            # wildcard * char is present
-            _cfg_keys = ["platform",
-                         "compiler",
-                         "config",
-                         "build",
-                         "with_mcuboot"]
+            # invalid is an optional field. Do not process if it is missing
+            if "invalid" in cfg:
+                # Invalid configurations(Do not build)
+                invalid_cfg = cfg["invalid"]
+                # Remove the rejected entries from the test list
+                rejection_cfg = TFM_Build_Manager.generate_rejection_list(
+                    comb_cfg,
+                    static_cfg,
+                    invalid_cfg)
 
-            # Replace wildcard ( "*") entries with every inluded in cfg variant
-            for n in range(len(res_list)):
-                res_list[n] = [res_list[n]] if res_list[n] != "*" \
-                    else cfg[_cfg_keys[n]]
+                # Subtract the two configurations
+                ret_cfg = {k: v for k, v in ret_cfg.items()
+                           if k not in rejection_cfg}
+            self.simple_config = False
+        else:
+            self.simple_config = True
+        return ret_cfg, static_cfg
 
-            rejection_cfg += self.gen_cfg_comb(*res_list)
+    # ----- Override below methods when subclassing for other projects ----- #
 
-        # Notfy the user for the rejected configuations
-        for i in rejection_cfg:
+    def print_summary(self):
+        """ Print an comprehensive list of the build jobs with their status """
 
-            name = "%s_%s_%s_%s_%s" % (i.TARGET_PLATFORM,
-                                       i.COMPILER,
-                                       i.PROJ_CONFIG,
-                                       i.CMAKE_BUILD_TYPE,
-                                       "BL2" if i.WITH_MCUBOOT else "NOBL2")
-            print("Rejecting config %s" % name)
+        try:
+            full_rep = self.unstash("Build Report")["report"]
+            fl = ([k for k, v in full_rep.items() if v['status'] == 'Failed'])
+            ps = ([k for k, v in full_rep.items() if v['status'] == 'Success'])
+        except Exception as E:
+            print("No report generated")
+            return
+        if fl:
+            print_test(t_list=fl, status="failed", tname="Builds")
+        if ps:
+            print_test(t_list=ps, status="passed", tname="Builds")
 
-        # Subtract the two lists and convert to dictionary
-        return list(set(full_cfg) - set(rejection_cfg))
+    @staticmethod
+    def generate_config_list(seed_config, static_config):
+        """ Generate all possible configuration combinations from a group of
+        lists of compiler options"""
+        config_list = []
+
+        if static_config["config_type"] == "tf-m":
+            cfg_name = "TFM_Build_CFG"
+            # Ensure the fields are sorted in the desired order
+            # seed_config can be a subset of sort order for configurations with
+            # optional parameters.
+            tags = [n for n in static_config["sort_order"]
+                    if n in seed_config.keys()]
+
+            data = []
+            for key in tags:
+                data.append(seed_config[key])
+            config_list = gen_cfg_combinations(cfg_name,
+                                               " ".join(tags),
+                                               *data)
+        else:
+            print("Not information for project type: %s."
+                  " Please check config" % static_config["config_type"])
+
+        ret_cfg = {}
+        # Build a dictionary mapping a human-readable name to each config
+        for i in config_list:
+            # Convert named tuples to string with boolean support
+            i_str = "_".join(map(lambda x: repr(x)
+                             if isinstance(x, bool) else x, list(i)))
+
+            # Replace boolean variables with BL2/NOBL2 and use the result
+            # as the configuration name.
+            ret_cfg[i_str.replace("True", "BL2").replace("False", "NOBL2")] = i
+
+        return ret_cfg
+
+    @staticmethod
+    def generate_rejection_list(seed_config,
+                                static_config,
+                                rejection_list):
+        rejection_cfg = {}
+
+        if static_config["config_type"] == "tf-m":
+
+            # If rejection list is empty do nothing
+            if not rejection_list:
+                return rejection_cfg
+
+            tags = [n for n in static_config["sort_order"]
+                    if n in seed_config.keys()]
+            sorted_default_lst = [seed_config[k] for k in tags]
+
+            # If tags are not aligned with rejection list entries quit
+            if len(tags) != len(rejection_list[0]):
+                print(len(tags), len(rejection_list[0]))
+                print("Error, tags should be assigned to each "
+                      "of the rejection inputs")
+                return []
+
+            # Replace wildcard ("*") entries with every variant
+            # included in cfg
+            for k in rejection_list:
+                # Pad the omitted values with wildcard char *
+                res_list = list(k) + ["*"] * (5 - len(k))
+                print("Working on rejection input: %s" % (res_list))
+
+                for n in range(len(res_list)):
+
+                    res_list[n] = [res_list[n]] if res_list[n] != "*" \
+                        else sorted_default_lst[n]
+
+                # Generate a configuration and a name for the completed array
+                rj_cfg = TFM_Build_Manager.generate_config_list(
+                    dict(zip(tags, res_list)),
+                    static_config)
+
+                # Append the configuration to the existing ones
+                rejection_cfg = {**rejection_cfg, **rj_cfg}
+
+            # Notify the user about the rejected configurations
+            for i in rejection_cfg.keys():
+                print("Rejecting config %s" % i)
+        else:
+            print("Not information for project type: %s."
+                  " Please check config" % static_config["config_type"])
+        return rejection_cfg
diff --git a/tfm_ci_pylib/tfm_builder.py b/tfm_ci_pylib/tfm_builder.py
index 37a1315..1908a8e 100644
--- a/tfm_ci_pylib/tfm_builder.py
+++ b/tfm_ci_pylib/tfm_builder.py
@@ -18,58 +18,48 @@
 __email__ = "minos.galanakis@linaro.org"
 __project__ = "Trusted Firmware-M Open CI"
 __status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
 
 import os
-from .utils import *
+import re
 import shutil
+from .utils import *
 from .structured_task import structuredTask
 
 
 class TFM_Builder(structuredTask):
     """ Wrap around tfm cmake system and spawn a thread to build the project.
     """
-    _tfb_build_params = ["TARGET_PLATFORM",
-                         "COMPILER",
-                         "PROJ_CONFIG",
-                         "CMAKE_BUILD_TYPE",
-                         "WITH_MCUBOOT"
-                         ]
-
-    _tfb_build_template = ("cmake -G \"Unix Makefiles\" -DPROJ_CONFIG=`"
-                           "readlink -f %(PROJ_CONFIG)s.cmake` "
-                           "-DTARGET_PLATFORM=%(TARGET_PLATFORM)s "
-                           "-DCOMPILER=%(COMPILER)s "
-                           "-DCMAKE_BUILD_TYPE=%(CMAKE_BUILD_TYPE)s "
-                           "-DBL2=%(WITH_MCUBOOT)s "
-                           "%(TFM_ROOT)s")
-
     def __init__(self,
                  name,      # Proccess name
-                 tfm_dir,   # TFM root directory
                  work_dir,  # Current working directory(ie logs)
                  cfg_dict,  # Input config dictionary of the following form
                             # input_dict = {"PROJ_CONFIG": "ConfigRegression",
                             #               "TARGET_PLATFORM": "MUSCA_A",
                             #               "COMPILER": "ARMCLANG",
                             #               "CMAKE_BUILD_TYPE": "Debug"}
-                 install=False,    # Install library after build
-                 build_threads=4,  # Number of CPU thrads used in build
-                 silent=False):    # Silence stdout ouptut
+                 build_threads=4,   # Number of CPU threads used in build
+                 silent=False,      # Silence stdout output
+                 img_sizes=False,   # Use arm-none-eabi-size for size info
+                 relative_paths=False):  # Store relative paths in report
 
         self._tfb_cfg = cfg_dict
         self._tfb_build_threads = build_threads
-        self._tfb_install = install
         self._tfb_silent = silent
+        self._tfb_img_sizes = img_sizes
+        self._tfb_relative_paths = relative_paths
         self._tfb_binaries = []
 
         # Required by other methods, always set working directory first
         self._tfb_work_dir = os.path.abspath(os.path.expanduser(work_dir))
 
-        self._tfb_tfm_dir = os.path.abspath(os.path.expanduser(tfm_dir))
+        # Override code_base_dir with abspath
+        _code_dir = self._tfb_cfg["codebase_root_dir"]
+        self._tfb_code_dir = os.path.abspath(os.path.expanduser(_code_dir))
         # Entries will be filled after sanity test on cfg_dict dring pre_exec
         self._tfb_build_dir = None
         self._tfb_log_f = None
+
         super(TFM_Builder, self).__init__(name=name)
 
     def mute(self):
@@ -77,10 +67,14 @@
 
     def log(self):
         """ Print and return the contents of log file """
-        with open(self._tfb_log_f, "r") as F:
-            log = F.read()
-        print(log)
-        return log
+        try:
+            with open(self._tfb_log_f, "r") as F:
+                log = F.read()
+            print(log)
+            return log
+        except FileNotFoundError:
+            print("Log %s not found" % self._tfb_log_f)
+            return ""
 
     def report(self):
         """Return the report on the job """
@@ -89,14 +83,10 @@
     def pre_eval(self):
         """ Tests that need to be run in set-up state """
 
-        # Test that all required entries exist in config
-        diff = list(set(self._tfb_build_params) - set(self._tfb_cfg.keys()))
-        if diff:
-            print("Cound't find require build entry: %s in config" % diff)
+        if not os.path.isdir(self._tfb_code_dir):
+            print("Missing code-base directory:", self._tfb_code_dir)
             return False
-        # TODO check validity of passed config values
-        # TODO test detection of srec
-        # self.srec_path = shutil.which("srec_cat")
+
         return True
 
     def pre_exec(self, eval_ret):
@@ -107,14 +97,6 @@
         # Ensure we have a clean build directory
         shutil.rmtree(self._tfb_build_dir, ignore_errors=True)
 
-        self._tfb_cfg["TFM_ROOT"] = self._tfb_tfm_dir
-
-        # Append the path for the config
-        self._tfb_cfg["PROJ_CONFIG"] = os.path.join(self._tfb_tfm_dir,
-                                                    "configs",
-                                                    self._tfb_cfg[("PROJ_"
-                                                                   "CONFIG")])
-
         # Log will be placed in work directory, named as the build dir
         self._tfb_log_f = "%s.log" % self._tfb_build_dir
 
@@ -123,100 +105,6 @@
             if not os.path.exists(p):
                 os.makedirs(p)
 
-        # Calcuate a list of expected binaries
-        binaries = []
-
-        # If install is asserted pick the iems from the appropriate location
-        if self._tfb_install:
-
-            fvp_path = os.path.join(self._tfb_build_dir,
-                                    "install", "outputs", "fvp")
-            platform_path = os.path.join(self._tfb_build_dir,
-                                         "install",
-                                         "outputs",
-                                         self._tfb_cfg["TARGET_PLATFORM"])
-
-            # Generate a list of binaries included in both directories
-            common_bin_list = ["tfm_%s.%s" % (s, e) for s in ["s", "ns"]
-                               for e in ["bin", "axf"]]
-            if self._tfb_cfg["WITH_MCUBOOT"]:
-                common_bin_list += ["mcuboot.%s" % e for e in ["bin", "axf"]]
-
-                # When building with bootloader extra binaries are expected
-                binaries += [os.path.join(platform_path, b) for b in
-                             ["tfm_sign.bin"]]
-                binaries += [os.path.join(fvp_path, b) for b in
-                             ["tfm_s_ns_signed.bin"]]
-
-            binaries += [os.path.join(p, b) for p in [fvp_path, platform_path]
-                         for b in common_bin_list]
-
-            # Add Musca required binaries
-            if self._tfb_cfg["TARGET_PLATFORM"] == "MUSCA_A":
-                binaries += [os.path.join(platform_path,
-                                          "musca_firmware.hex")]
-
-            self._tfb_binaries = binaries
-
-        else:
-            binaries += [os.path.join(self._tfb_build_dir, "app", "tfm_ns")]
-            binaries += [os.path.join(self._tfb_build_dir, "app",
-                                          "secure_fw", "tfm_s")]
-            if self._tfb_cfg["WITH_MCUBOOT"]:
-                binaries += [os.path.join(self._tfb_build_dir,
-                             "bl2", "ext", "mcuboot", "mcuboot")]
-
-            ext = ['.bin', '.axf']
-            self._tfb_binaries = ["%s%s" % (n, e) for n in binaries
-                                  for e in ext]
-
-            # Add Musca required binaries
-            if self._tfb_cfg["TARGET_PLATFORM"] == "MUSCA_A":
-                self._tfb_binaries += [os.path.join(self._tfb_build_dir,
-                                       "tfm_sign.bin")]
-                self._tfb_binaries += [os.path.join(self._tfb_build_dir,
-                                       "musca_firmware.hex")]
-
-    def get_binaries(self,
-                     bootl=None,
-                     bin_s=None,
-                     bin_ns=None,
-                     bin_sign=None,
-                     filt=None):
-        """ Return the absolute location of binaries (from config)
-        if they exist. Can add a filter parameter which will only
-        consider entries with /filter/ in their path as a directory """
-        ret_boot = None
-        ret_bin_ns = None
-        ret_bin_s = None
-        ret_bin_sign = None
-
-        # Apply filter as a /filter/ string to the binary list
-        filt = "/" + filt + "/" if filter else None
-        binaries = list(filter(lambda x: filt in x, self._tfb_binaries)) \
-            if filt else self._tfb_binaries
-
-        for obj_file in binaries:
-            fname = os.path.split(obj_file)[-1]
-            if bootl:
-                if fname == bootl:
-                    ret_boot = obj_file
-                    continue
-            if bin_s:
-                if fname == bin_s:
-                    ret_bin_s = obj_file
-                    continue
-
-            if bin_ns:
-                if fname == bin_ns:
-                    ret_bin_ns = obj_file
-                    continue
-            if bin_sign:
-                if fname == bin_sign:
-                    ret_bin_sign = obj_file
-                    continue
-        return [ret_boot, ret_bin_s, ret_bin_ns, ret_bin_sign]
-
     def task_exec(self):
         """ Main tasks """
 
@@ -224,141 +112,100 @@
         self.set_status(-1)
         # Go to build directory
         os.chdir(self._tfb_build_dir)
-        # Compile the build commands
-        cmake_cmd = self._tfb_build_template % self._tfb_cfg
-        build_cmd = "cmake --build ./ -- -j %s" % self._tfb_build_threads
+
+        build_cmds = self._tfb_cfg["build_cmds"]
+
+        threads_no_rex = re.compile(r'.*(-j\s?(\d+))')
 
         # Pass the report to later stages
-        rep = {"build_cmd": "%s" % build_cmd,
-               "cmake_cmd": "%s" % cmake_cmd}
+        rep = {"build_cmd": "%s" % ",".join(build_cmds)}
         self.stash("Build Report", rep)
 
-        # Calll camke to configure the project
-        if not subprocess_log(cmake_cmd,
-                              self._tfb_log_f,
-                              prefix=cmake_cmd,
-                              silent=self._tfb_silent):
+        # Execute each of the configured build commands in sequence
+        for build_cmd in build_cmds:
+            # if a -j parameter is passed as user argument
+            user_set_threads_match = threads_no_rex.findall(build_cmd)
+
+            if user_set_threads_match:
+                # Unpack the regex groups (fullmatch, decimal match)
+                user_jtxt, user_set_threads = user_set_threads_match[0]
+                if int(user_set_threads) > self._tfb_build_threads:
+                    print("Ignoring user requested n=%s threads because it"
+                          " exceeds the maximum thread set ( %d )" %
+                          (user_set_threads, self._tfb_build_threads))
+                    thread_no = self._tfb_build_threads
+                else:
+                    print("Using %s build threads" % user_set_threads)
+                    thread_no = user_set_threads
+                build_cmd = build_cmd.replace(user_jtxt,
+                                              "-j %s " % thread_no)
+
             # Build it
             if subprocess_log(build_cmd,
                               self._tfb_log_f,
                               append=True,
                               prefix=build_cmd,
                               silent=self._tfb_silent):
+
                 raise Exception("Build Failed please check log: %s" %
                                 self._tfb_log_f)
-        else:
-            raise Exception("Cmake Failed please check log: %s" %
-                            self._tfb_log_f)
 
-        if self._tfb_install:
-            install_cmd = "cmake --build ./ -- -j install"
-            if subprocess_log(install_cmd,
-                              self._tfb_log_f,
-                              append=True,
-                              prefix=install_cmd,
-                              silent=self._tfb_silent):
-                raise Exception(("Make install Failed."
-                                 " please check log: %s") % self._tfb_log_f)
-        if self._tfb_cfg["TARGET_PLATFORM"] == "MUSCA_A":
-            boot_f, s_bin, ns_bin, sns_signed_bin = self.get_binaries(
-                bootl="mcuboot.bin",
-                bin_s="tfm_s.bin",
-                bin_ns="tfm_ns.bin",
-                bin_sign="tfm_sign.bin",
-                filt="MUSCA_A")
-            self.convert_to_hex(boot_f, sns_signed_bin)
         self._t_stop()
 
-    def sign_img(self, secure_bin, non_secure_bin):
-        """Join a secure and non secure image and sign them"""
-
-        imgtool_dir = os.path.join(self._tfb_tfm_dir,
-                                   "bl2/ext/mcuboot/scripts/")
-        flash_layout = os.path.join(self._tfb_tfm_dir,
-                                    "platform/ext/target/musca_a/"
-                                    "partition/flash_layout.h")
-        sign_cert = os.path.join(self._tfb_tfm_dir,
-                                 "bl2/ext/mcuboot/root-rsa-2048.pem")
-        sns_unsigned_bin = os.path.join(self._tfb_build_dir,
-                                        "sns_unsigned.bin")
-        sns_signed_bin = os.path.join(self._tfb_build_dir, "sns_signed.bin")
-
-        # Early versions of the tool hard relative imports, run from its dir
-        os.chdir(imgtool_dir)
-        assemble_cmd = ("python3 assemble.py -l  %(layout)s -s %(s)s "
-                        "-n %(ns)s -o %(sns)s") % {"layout": flash_layout,
-                                                   "s": secure_bin,
-                                                   "ns": non_secure_bin,
-                                                   "sns": sns_unsigned_bin
-                                                   }
-        sign_cmd = ("python3 imgtool.py sign -k %(cert)s --align 1 -v "
-                    "1.0 -H 0x400 --pad 0x30000 "
-                    "%(sns)s %(sns_signed)s") % {"cert": sign_cert,
-                                                 "sns": sns_unsigned_bin,
-                                                 "sns_signed": sns_signed_bin
-                                                 }
-        run_proccess(assemble_cmd)
-        run_proccess(sign_cmd)
-        # Return to build directory
-        os.chdir(self._tfb_build_dir)
-        return sns_signed_bin
-
-    def convert_to_hex(self,
-                       boot_bin,
-                       sns_signed_bin,
-                       qspi_base=0x200000,
-                       boot_size=0x10000):
-        """Convert a signed image to an intel hex format with mcuboot """
-        if self._tfb_install:
-            platform_path = os.path.join(self._tfb_build_dir,
-                                         "install",
-                                         "outputs",
-                                         self._tfb_cfg["TARGET_PLATFORM"])
-            firmware_hex = os.path.join(platform_path, "musca_firmware.hex")
-        else:
-            firmware_hex = os.path.join(self._tfb_build_dir,
-                                        "musca_firmware.hex")
-
-        img_offset = qspi_base + boot_size
-        merge_cmd = ("srec_cat %(boot)s -Binary -offset 0x%(qspi_offset)x "
-                     "%(sns_signed)s -Binary -offset 0x%(img_offset)x "
-                     "-o %(hex)s -Intel") % {"boot": boot_bin,
-                                             "sns_signed": sns_signed_bin,
-                                             "hex": firmware_hex,
-                                             "qspi_offset": qspi_base,
-                                             "img_offset": img_offset
-                                             }
-        run_proccess(merge_cmd)
-        return
-
     def post_eval(self):
         """ Verify that the artefacts exist """
         print("%s Post eval" % self.get_name())
 
         ret_eval = False
         rep = self.unstash("Build Report")
-        missing_binaries = list(filter(lambda x: not os.path.isfile(x),
-                                self._tfb_binaries))
 
-        if len(missing_binaries):
-            print("ERROR: Could not locate the following binaries:")
-            print("\n".join(missing_binaries))
-
-            # Update the artifacts to not include missing ones
-            artf = [n for n in self._tfb_binaries if n not in missing_binaries]
-            # TODO update self._tfb_binaries
-            ret_eval = False
-        else:
-            print("SUCCESS: Produced binaries:")
-            print("\n".join(self._tfb_binaries))
-            ret_eval = True
-
-            artf = self._tfb_binaries
+        artefacts = list_filtered_tree(self._tfb_work_dir, r'%s' %
+                                       self._tfb_cfg["artifact_capture_rex"])
 
         # Add artefact related information to report
         rep["log"] = self._tfb_log_f
-        rep["missing_artefacts"] = missing_binaries
-        rep["artefacts"] = artf
+
+        if not len(artefacts):
+            print("ERROR: Could not capture any binaries:")
+
+            # TODO update self._tfb_binaries
+            ret_eval = False
+        else:
+            print("SUCCESS: Produced the following binaries:")
+            print("\n\t".join(artefacts))
+            ret_eval = True
+
+        rep["artefacts"] = artefacts
+
+        # Process the artefacts into file structures
+        art_files = {}
+        for art_item in artefacts:
+            art_f = {"pl_source": 1,
+                     "resource": art_item if not self._tfb_relative_paths
+                     else resolve_rel_path(art_item),
+                     "size": {"bytes": str(os.path.getsize(art_item))}
+                     }
+            if self._tfb_img_sizes and ".axf" in art_item:
+                eabi_size, _ = arm_non_eabi_size(art_item)
+                art_f["size"]["text"] = eabi_size["text"]
+                art_f["size"]["data"] = eabi_size["data"]
+                art_f["size"]["bss"] = eabi_size["bss"]
+            # filename is used as key for artefacts
+            art_files[os.path.split(art_item)[-1]] = art_f
+        rep["artefacts"] = art_files
+
+        if "required_artefacts" in self._tfb_cfg.keys():
+            if len(self._tfb_cfg["required_artefacts"]):
+                print("Searching for required binaries")
+                missing_binaries = list(filter(lambda x: not os.path.isfile(x),
+                                        self._tfb_cfg["required_artefacts"]))
+                if len(missing_binaries):
+                    rep["missing_artefacts"] = missing_binaries
+                    print("ERROR: Missing required artefacts:")
+                    print("\n".join(missing_binaries))
+                    ret_eval = False
+                else:
+                    ret_eval = True
 
         rep["status"] = "Success" if ret_eval else "Failed"
         self.stash("Build Report", rep)
@@ -371,3 +218,7 @@
             print("TFM Builder %s was Successful" % self.get_name())
         else:
             print("TFM Builder %s was UnSuccessful" % self.get_name())
+
+
+if __name__ == "__main__":
+    pass
diff --git a/tfm_ci_pylib/utils.py b/tfm_ci_pylib/utils.py
index 7d1ca46..2096b8b 100755
--- a/tfm_ci_pylib/utils.py
+++ b/tfm_ci_pylib/utils.py
@@ -19,16 +19,20 @@
 __email__ = "minos.galanakis@linaro.org"
 __project__ = "Trusted Firmware-M Open CI"
 __status__ = "stable"
-__version__ = "1.0"
+__version__ = "1.1"
 
 import os
+import re
 import sys
 import yaml
+import requests
 import argparse
 import json
 import itertools
+import xmltodict
+from shutil import move
 from collections import OrderedDict, namedtuple
-from subprocess import Popen, PIPE, STDOUT
+from subprocess import Popen, PIPE, STDOUT, check_output
 
 
 def detect_python3():
@@ -37,6 +41,22 @@
     return sys.version_info > (3, 0)
 
 
+def find_missing_files(file_list):
+    """ Return the files that do not exist in the file_list """
+
+    F = set(file_list)
+    T = set(list(filter(os.path.isfile, file_list)))
+    return list(F.difference(T))
+
+
+def resolve_rel_path(target_path, origin_path=os.getcwd()):
+    """ Resolve relative path from origin to target. By default origin
+    path is current working directory. """
+
+    common = os.path.commonprefix([origin_path, target_path])
+    return os.path.relpath(target_path, common)
+
+
 def print_test_dict(data_dict,
                     pad_space=80,
                     identation=5,
@@ -248,6 +268,36 @@
     return pcss.returncode
 
 
+def get_pid_status(pid):
+    """ Read procfs on Linux machines to determine a process's status.
+    Returns the status if the process exists or None if it does not """
+
+    try:
+        with open("/proc/%s/status" % pid, "r") as F:
+            full_state = F.read()
+            return re.findall(r'(?:State:\t[A-Z]{1} \()(\w+)',
+                              full_state, re.MULTILINE)[0]
+    except Exception as e:
+        print("Exception", e)
+
+
+def check_pid_status(pid, status_list):
+    """ Check a process's status against a provided list and return True
+    if the process exists and has a status included in the list. (Linux) """
+
+    pid_status = get_pid_status(pid)
+
+    if not pid_status:
+        print("PID  %s does not exist." % pid)
+        return False
+
+    ret = pid_status in status_list
+    # TODO Remove debug print
+    if not ret:
+        print("PID status %s not in %s" % (pid_status, ",".join(status_list)))
+    return ret
+
+
 def list_chunks(l, n):
     """ Yield successive n-sized chunks from l. """
 
@@ -276,6 +326,17 @@
     return [build_config(*x) for x in itertools.product(*args)]
 
 
+def show_progress(current_count, total_count):
+    """ Display the progress percentage of current_count over total_count """
+
+    progress = int((current_count / total_count) * 100)
+    completed_count = int(progress * 0.7)
+    remaining_count = 70 - completed_count
+    print("[ %s%s | %d%% ]" % ("#" * completed_count,
+                               "~" * remaining_count,
+                               progress))
+
+
 def get_cmd_args(descr="", parser=None):
     """ Parse command line arguments """
     # Parse command line arguments to override config
@@ -283,3 +344,230 @@
     if not parser:
         parser = argparse.ArgumentParser(description=descr)
     return parser.parse_args()
+
+
+def arm_non_eabi_size(filename):
+    """ Run the arm-none-eabi-size command and parse the output using regex.
+    Will return a list with the formatted data as well as the raw output of
+    the command """
+
+    # Matches the single data row of arm-none-eabi-size's (berkeley format)
+    # output: text, data, bss, dec (decimal total), hex (total) and filename.
+    size_info_rex = re.compile(r'^\s+(?P<text>[0-9]+)\s+(?P<data>[0-9]+)\s+'
+                               r'(?P<bss>[0-9]+)\s+(?P<dec>[0-9]+)\s+'
+                               r'(?P<hex>[0-9a-f]+)\s+(?P<file>\S+)',
+                               re.MULTILINE)
+
+    # Raises if the tool is not in PATH or the call exceeds the 2 s timeout.
+    eabi_size = check_output(["arm-none-eabi-size",
+                              filename],
+                             timeout=2).decode('UTF-8').rstrip()
+
+    size_data = re.search(size_info_rex, eabi_size)
+
+    return [{"text": size_data.group("text"),
+             "data": size_data.group("data"),
+             "bss": size_data.group("bss"),
+             "dec": size_data.group("dec"),
+             "hex": size_data.group("hex")}, eabi_size]
+
+
+def list_subdirs(directory):
+    """ Return the absolute paths of the immediate sub-directories of
+    directory. Symlinks are resolved before the directory test. """
+
+    directory = os.path.abspath(directory)
+    abs_sub_dirs = [os.path.join(directory, n) for n in os.listdir(directory)]
+    return [n for n in abs_sub_dirs if os.path.isdir(os.path.realpath(n))]
+
+
+def get_local_git_info(directory, json_out_f=None):
+    """ Extract git related information from a target directory. It allows
+    optional export to json file """
+
+    directory = os.path.abspath(directory)
+    cur_dir = os.path.abspath(os.getcwd())
+    # The git commands below act on the current working directory, so
+    # temporarily switch into the target checkout (restored before return).
+    os.chdir(directory)
+
+    # System commands to collect information
+    # cmd1: hash, author name, email, date, subject (tab separated)
+    cmd1 = "git log HEAD -n 1 --pretty=format:'%H%x09%an%x09%ae%x09%ai%x09%s'"
+    # cmd2: full commit message body
+    cmd2 = "git log HEAD -n 1  --pretty=format:'%b'"
+    # cmd3: url of the first listed remote
+    cmd3 = "git remote -v | head -n 1 | awk '{ print $2}';"
+    # cmd4: remote branch whose head is the current commit
+    cmd4 = ("git ls-remote --heads origin | "
+            "grep $(git rev-parse HEAD) | cut -d / -f 3")
+
+    # Splits a commit body into free text, Change-Id and Signed-off-by parts
+    git_info_rex = re.compile(r'(?P<body>^[\s\S]*?)((?:Change-Id:\s)'
+                              r'(?P<change_id>.*)\n)((?:Signed-off-by:\s)'
+                              r'(?P<sign_off>.*)\n?)', re.MULTILINE)
+
+    proc_res = []
+    for cmd in [cmd1, cmd2, cmd3, cmd4]:
+        r, e = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
+        if e:
+            # Any stderr output aborts collection; implicitly returns None.
+            print("Error", e)
+            return
+        else:
+            # Prefer ascii, falling back to utf-8 for non-ascii authors etc.
+            try:
+                txt_body = r.decode('ascii')
+            except UnicodeDecodeError as E:
+                txt_body = r.decode('utf-8')
+            proc_res.append(txt_body.rstrip())
+
+    # Unpack and tag the data
+    hash, name, email, date, subject = proc_res[0].split('\t')
+
+    _raw_body = proc_res[1]
+    _bd_items = re.findall(r'(Signed-off-by|Change-Id)', _raw_body,
+                           re.MULTILINE)
+
+    signed_off = None
+    body = None
+    change_id = None
+    # If both sign-off and gerrit-id exist
+    if len(_bd_items) == 2:
+        m = git_info_rex.search(_raw_body)
+        # TODO(review): debug print of the regex matches — remove?
+        print(git_info_rex.findall(_raw_body))
+        if m is not None:
+            match_dict = m.groupdict()
+            if "body" in match_dict.keys():
+                body = match_dict["body"]
+            if "sign_off" in match_dict.keys():
+                signed_off = match_dict["sign_off"]
+            if "change_id" in match_dict.keys():
+                change_id = match_dict["change_id"]
+        else:
+            print("Error: Could not regex parse message", repr(_raw_body))
+            body = _raw_body
+    # If only one of sign-off / gerrit-id exist
+    elif len(_bd_items) == 1:
+        _entry_key = _bd_items[0]
+        body, _extra = _raw_body.split(_entry_key)
+        if _entry_key == "Change-Id":
+            change_id = _extra
+        else:
+            signed_off = _extra
+    # If the message contains commit message body only
+    else:
+        body = _raw_body
+
+    # Attempt to read the branch from Gerrit Trigger
+    try:
+        branch = os.environ["GERRIT_BRANCH"]
+    # IF not compare the commit hash with the remote branches to determine the
+    # branch of origin. Warning this assumes that only one branch has its head
+    # on this commit.
+    except KeyError as E:
+        branch = proc_res[3]
+
+    remote = proc_res[2]
+    # Internal Gerrit specific code
+    # Intended for converting the git remote to a more usable url
+    known_remotes = ["https://gerrit.oss.arm.com",
+                     "http://gerrit.mirror.oss.arm.com"]
+
+    for kr in known_remotes:
+        if kr in remote:
+            print("Applying Remote specific patch to remote", kr)
+
+            # Keep only the repository path and build a gitweb commit url
+            remote = remote.split(kr)[-1][1:]
+            print("REMOTE", remote)
+            remote = "%s/gitweb?p=%s.git;a=commit;h=%s" % (kr, remote, hash)
+            break
+
+    out = {"author": name.strip(),
+           "email": email.strip(),
+           "dir": directory.strip(),
+           "remote": remote.strip(),
+           "date": date.strip(),
+           "commit": hash.strip(),
+           "subject": subject.strip(),
+           "message": body.strip(),
+           "change_id": change_id.strip() if change_id is not None else "N.A",
+           "sign_off": signed_off.strip() if signed_off is not None else "N.A",
+           "branch": branch.strip()}
+
+    # Restore the directory path
+    os.chdir(cur_dir)
+    if json_out_f:
+        save_json(json_out_f, out)
+    return out
+
+
<![CDATA[+def get_remote_git_info(url):
+    """ Collect git information from a Linux Kernel web repository """
+
+    # Regexes scrape fields out of the repository's HTML commit page.
+    auth_rex = re.compile(r'(?:<th>author</th>.*)(?:span>)(.*)'
+                          r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
+    # commiter_rex = re.compile(r'(?:<th>committer</th>.*)(?:</div>)(.*)'
+    #                          r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
+    subject_rex = re.compile(r'(?:\'commit-subject\'>)(.*)(?:</div>)')
+    body_rex = re.compile(r'(?:\'commit-msg\'>)([\s\S^<]*)(?:</div>'
+                          r'<div class=\'diffstat-header\'>)', re.MULTILINE)
+
+    content = requests.get(url).text
+    author, date = re.search(auth_rex, content).groups()
+    subject = re.search(subject_rex, content).groups()[0]
+    body = re.search(body_rex, content).groups()[0]
+    # Assumes url has the form <remote>?id=<hash> — remote[:-3] drops "?id"
+    remote, hash = url.split("=")
+
+    outdict = {"author": author,
+               "remote": remote[:-3],
+               "date": date,
+               "commit": hash,
+               "subject": subject,
+               "message": body}
+    # Clean up html noise (e.g. "&lt;" / "&gt;" entity remnants)
+    return {k: re.sub(r'&[a-z]t;?', "", v) for k, v in outdict.items()}]]>
+
+
+def convert_git_ref_path(dir_path):
+    """ If a git long hash is detected in a path move it to a short hash.
+    Renames the directory on disk and returns the (possibly updated) path """
+
+    # Detect a git hash on a directory naming format of name_{hash},
+    # {hash}, name-{hash}
+    git_hash_rex = re.compile(r'(?:[_|-])*([a-f0-9]{40})')
+
+    # if checkout directory name contains a git reference convert to short
+    git_hash = git_hash_rex.findall(dir_path)
+    if len(git_hash):
+        d = dir_path.replace(git_hash[0], git_hash[0][:7])
+        # Bug fix: interpolate the format string. Previously the template and
+        # its arguments were printed as three separate values.
+        print("Renaming %s -> %s" % (dir_path, d))
+        move(dir_path, d)
+        dir_path = d
+    return dir_path
+
+
+def xml_read(file):
+    """ Read the contents of an xml file and convert it to a python object """
+
+    data = None
+    try:
+        with open(file, "r") as F:
+            data = xmltodict.parse(F.read())
+    except Exception as E:
+        # Best effort: report the failure and return None to the caller.
+        print("Error", E)
+    return data
+
+
+def list_filtered_tree(directory, rex_filter=None):
+    """ Recursively list every file under directory, optionally keeping only
+    the paths matched by the rex_filter regular expression """
+
+    ret = []
+    for path, subdirs, files in os.walk(directory):
+        for fname in files:
+            ret.append(os.path.join(path, fname))
+    if rex_filter:
+        rex = re.compile(rex_filter)
+        return [n for n in ret if rex.search(n)]
+    else:
+        return ret
+
+
+def gerrit_patch_from_changeid(remote, change_id):
+    """ Use Gerrit's REST api for a best effort to retrieve the url of the
+    patch-set under review """
+
+    try:
+        r = requests.get('%s/changes/%s' % (remote, change_id),
+                         headers={'Accept': 'application/json'})
+        # Gerrit prefixes JSON responses with an anti-XSSI line; strip
+        # everything before the first "{" before parsing.
+        resp_data = r.text[r.text.find("{"):].rstrip()
+        change_no = json.loads(resp_data)["_number"]
+        return "%s/#/c/%s" % (remote, change_no)
+    except Exception as E:
+        print("Failed to retrieve change (%s) from URL %s" % (change_id,
+                                                              remote))
+        print("Exception Thrown:", E)
+        raise Exception()