Refactor the LAVA job generation

Create a LAVA job per test
Use flasher support
Simplify templates
Include metadata
Add LAVA job generation for each parallel build
Implement the LAVA wait and parsing step for simple gerrit verification
Create FVP jobs as well
Change filenames for MPS2 fvp jobs
Fix FVP templates and add NOBL2 jobs
Use docker-prefix and license-variable from Jenkins job environment

Change-Id: I4dca28a353bc908a570f578b539aeb9c4528f6fa
Signed-off-by: Dean Birch <dean.birch@arm.com>
diff --git a/build_helper/build_helper_configs.py b/build_helper/build_helper_configs.py
index 9b4deae..0372fc1 100755
--- a/build_helper/build_helper_configs.py
+++ b/build_helper/build_helper_configs.py
@@ -529,7 +529,27 @@
     "seed_params": {
         "target_platform": ["AN521"],
         "compiler": ["ARMCLANG", "GNUARM"],
-        "proj_config": ["ConfigDefault", "ConfigCoreIPCTfmLevel2"],
+        "proj_config": ["ConfigDefault", "ConfigCoreIPCTfmLevel2", "ConfigCoreIPC", "ConfigRegression"],
+        "cmake_build_type": ["Release"],
+        "with_mcuboot": [True, False],
+    },
+    "common_params": _common_tfm_builder_cfg,
+    "invalid": [
+        ("AN521", "ARMCLANG", "ConfigDefault", "Release", False),
+        ("AN521", "ARMCLANG", "ConfigCoreIPCTfmLevel2", "Release", False),
+        ("AN521", "ARMCLANG", "ConfigCoreIPCTfmLevel2", "Release", True),
+        ("AN521", "ARMCLANG", "ConfigCoreIPC", "Release", False),
+        ("AN521", "ARMCLANG", "ConfigCoreIPC", "Release", True),
+        ("AN521", "ARMCLANG", "ConfigRegression", "Release", False),
+        ("AN521", "ARMCLANG", "ConfigRegression", "Release", True),
+    ],
+}
+
+config_lava_debug = {
+    "seed_params": {
+        "target_platform": ["AN521"],
+        "compiler": ["GNUARM"],
+        "proj_config": ["ConfigCoreIPC", "ConfigCoreIPCTfmLevel2", "ConfigRegression"],
         "cmake_build_type": ["Release"],
         "with_mcuboot": [True],
     },
@@ -561,6 +581,8 @@
                     "release": config_release,
                     "an521_psa_api": config_AN521_PSA_API,
                     "an521_psa_ipc": config_AN521_PSA_IPC,
+                    "debug": config_debug,
+                    "lava_debug": config_lava_debug,
                     "ci": config_ci}
 
 if __name__ == '__main__':
diff --git a/jenkins/build-config.jpl b/jenkins/build-config.jpl
index b4263cd..5e0ff1f 100644
--- a/jenkins/build-config.jpl
+++ b/jenkins/build-config.jpl
@@ -31,7 +31,7 @@
         ])
     }
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
     dir("mbed-crypto") {
       checkout(
@@ -39,7 +39,7 @@
         poll: false,
         scm: [
           $class: 'GitSCM',
-          branches: [[name: 'FETCH_HEAD']], 
+          branches: [[name: 'FETCH_HEAD']],
           userRemoteConfigs: [[
             refspec: 'refs/tags/$MBEDCRYPTO_VERSION',
             url: params.MBEDCRYPTO_URL
@@ -63,7 +63,7 @@
             branches: [[name: 'FETCH_HEAD']],
             userRemoteConfigs: [[
               refspec: 'refs/tags/v20.03_API1.0',
-              url: 'https://github.com/ARM-software/psa-arch-tests'
+              url: params.PSA_ARCH_TESTS_URL
             ]]
           ]
         )
@@ -86,6 +86,8 @@
   } finally {
     g = new Gerrit()
     g.verifyStatusInWorkspace(verify, env.CONFIG_NAME, 'build')
+    def buildStatus = (verify == 1) ? 'Successful' : 'Failed'
+    //g.commentInWorkspace("Build configuration ${env.CONFIG_NAME} ${buildStatus}: ${env.RUN_DISPLAY_URL}")
     cleanWs()
   }
 }
diff --git a/jenkins/build-docs.jpl b/jenkins/build-docs.jpl
index 71993b7..983bf16 100644
--- a/jenkins/build-docs.jpl
+++ b/jenkins/build-docs.jpl
@@ -26,7 +26,7 @@
         ])
     }
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
     dir("mbed-crypto") {
       checkout(
@@ -34,7 +34,7 @@
         poll: false,
         scm: [
           $class: 'GitSCM',
-          branches: [[name: 'FETCH_HEAD']], 
+          branches: [[name: 'FETCH_HEAD']],
           userRemoteConfigs: [[
             refspec: 'refs/tags/$MBEDCRYPTO_VERSION',
             url: params.MBEDCRYPTO_URL
@@ -63,6 +63,8 @@
   } finally {
     g = new Gerrit()
     g.verifyStatusInWorkspace(verify, 'tf-m-build-docs', 'build')
+    def buildStatus = (verify == 1) ? 'Successful' : 'Failed'
+    //g.commentInWorkspace("Build docs ${buildStatus}: ${env.RUN_DISPLAY_URL}")
     cleanWs()
   }
 }
diff --git a/jenkins/checkpatch.jpl b/jenkins/checkpatch.jpl
index 7855cb0..e9f242e 100644
--- a/jenkins/checkpatch.jpl
+++ b/jenkins/checkpatch.jpl
@@ -26,7 +26,7 @@
         ])
     }
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
     dir("mbed-crypto") {
       checkout(
@@ -63,6 +63,8 @@
     } finally {
       g = new Gerrit()
       g.verifyStatusInWorkspace(verify, 'checkpatch', 'static')
+      def buildStatus = (verify == 1) ? 'Successful' : 'Failed'
+      //g.commentInWorkspace("Build checkpatch ${buildStatus}: ${env.RUN_DISPLAY_URL}")
       cleanWs()
     }
   }
diff --git a/jenkins/ci.jpl b/jenkins/ci.jpl
index e20b9a3..cedcdc1 100644
--- a/jenkins/ci.jpl
+++ b/jenkins/ci.jpl
@@ -13,23 +13,24 @@
 def listConfigs(ci_scripts_dir, config_list, filter_group) {
   dir(ci_scripts_dir) {
     echo "Obtaining list of configs."
-    echo "Running: ./configs.py -g ${filter_group}"
+    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}"
     def build_config_list_raw = sh(script: """\
-./configs.py -g ${filter_group}
+python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}
 """, returnStdout: true).trim()
     def build_config_list = build_config_list_raw.tokenize('\n')
     config_list.addAll(build_config_list)
   }
 }
 
-def buildConfig(ci_scripts_dir, config, filter_group) {
+def buildConfig(ci_scripts_dir, config, filter_group, results) {
   def params = []
+  def params_collection = [:]
   def build_config_params
   dir(ci_scripts_dir) {
     echo "Obtaining build configuration for config ${config}"
-    echo "Running: ./configs.py -g ${filter_group} ${config}"
+    echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
     build_config_params = sh(script: """\
-./configs.py -g ${filter_group} ${config}
+python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
 """, returnStdout: true).trim()
   }
   def lines = build_config_params.tokenize('\n')
@@ -37,6 +38,7 @@
     def key, value
     (key, value) = line.tokenize('=')
     params += string(name: key, value: value)
+    params_collection[key] = value
   }
   params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
   params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
@@ -60,8 +62,14 @@
       print("Doing LAVA stuff for ${build_url}")
       params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
       params += string(name: 'BUILD_URL', value: build_url)
+      params += string(name: 'LAVA_URL', value: env.LAVA_URL)
       def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
-      results['lava_jobs'] += lava_res.getDescription()
+      if (lava_res.result in failure_states) {
+        error("LAVA Create and Submit failed at ${lava_res.getAbsoluteUrl()}")
+      }
+      else {
+        results['lava_jobs'] += lava_res.getDescription()
+      }
     }
   }
 }
@@ -104,14 +112,37 @@
   }
 }
 
+def lineInString(string, match) {
+  def lines = string.split("\n")
+  def result = lines.findAll { it.contains(match) }
+  return result[0]
+}
+
+def getResult(string, match) {
+  line = lineInString(string, match)
+  return(line.split(match)[1].split(' '))
+}
+
+def submitJobsToList(results) {
+  def all_jobs = []
+  for (String result : results){
+    jobs_s = result.split('JOBS: ')
+    if (jobs_s.size() > 1) {
+      all_jobs += jobs_s[1]
+    }
+  }
+  return(all_jobs)
+}
+
 def configs = []
 def builds = [:]
+def results = [:]
 
-node("master") {
+node("docker-amd64-xenial") {
   stage("Init") {
     cleanWs()
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
   }
   stage("Configs") {
@@ -120,7 +151,7 @@
     results['builds'] = [:]
     results['lava_jobs'] = []
     for (config in configs) {
-      builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP)
+      builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
     }
     builds["docs"] = buildDocs()
   }
@@ -145,12 +176,50 @@
 node("docker-amd64-xenial") {
   stage("Tests") {
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
+    def all_jobs = []
+    def success = true
     print("Wait for LAVA results here...")
-    results['lava_jobs'].each { result ->
-      print(result)
+    try {
+      all_jobs = submitJobsToList(results['lava_jobs'])
+      if (all_jobs.size() > 0) {
+        dir("tf-m-ci-scripts") {
+          withCredentials([usernamePassword(credentialsId: 'LAVA_CREDENTIALS', passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
+            output = sh(script: """./lava_helper/lava_wait_jobs.py --job-ids ${all_jobs.join(",")} \
+  --lava-url ${env.LAVA_URL} --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} \
+  --artifacts-path lava_artifacts \
+  """, returnStdout: true).trim()
+            archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
+            print(output)
+            g = new Gerrit()
+            def boot_result = getResult(output, 'BOOT_RESULT: ')[0]
+            if (boot_result) {
+              g.verifyStatus(boot_result, "lava_boot", "test")
+            }
+            def test_result = getResult(output, 'TEST_RESULT: ')[0]
+            if (test_result) {
+              g.verifyStatus(test_result, "lava_test", "test")
+            }
+            if (boot_result.toInteger() < 1 || test_result.toInteger() < 1) {
+              error("Marking job as failed due to failed boots/tests")
+            }
+          }
+        }
+      }
+      else {
+        print("There were no LAVA jobs to test.")
+      }
+    }
+    catch (Exception e) {
+      print("ERROR: ${e}")
+      success = false
+    } finally {
+      archiveArtifacts artifacts: 'tf-m-ci-scripts/lava_artifacts/**', allowEmptyArchive: true
+      cleanWs()
+      if (!success) {
+        error("There was an Error waiting for LAVA jobs")
+      }
     }
   }
-  cleanWs()
 }
diff --git a/jenkins/cppcheck.jpl b/jenkins/cppcheck.jpl
index fb40a9f..9e2e756 100644
--- a/jenkins/cppcheck.jpl
+++ b/jenkins/cppcheck.jpl
@@ -26,7 +26,7 @@
         ])
     }
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
     dir("mbed-crypto") {
       checkout(
@@ -66,6 +66,8 @@
     } finally {
       g = new Gerrit()
       g.verifyStatusInWorkspace(verify, 'cppcheck', 'static')
+      def buildStatus = (verify == 1) ? 'Successful' : 'Failed'
+      //g.commentInWorkspace("Build cppcheck ${buildStatus}: ${env.RUN_DISPLAY_URL}")
       cleanWs()
     }
   }
diff --git a/jenkins/lava-submit.jpl b/jenkins/lava-submit.jpl
index fbb8611..73d5b78 100644
--- a/jenkins/lava-submit.jpl
+++ b/jenkins/lava-submit.jpl
@@ -10,7 +10,7 @@
   stage("Init") {
     cleanWs()
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
   }
   stage("LAVA") {
@@ -25,12 +25,13 @@
 --build-number ${env.BUILD_NUMBER} --output-dir lava_jobs \
 --compiler ${env.COMPILER} --platform ${env.TARGET_PLATFORM} \
 ${bl2_string} --build-type ${env.CMAKE_BUILD_TYPE} \
---jenkins-build-url ${env.BUILD_URL} --proj-config ${env.PROJ_CONFIG}
+--jenkins-build-url ${env.BUILD_URL} --proj-config ${env.PROJ_CONFIG} \
+--docker-prefix ${env.DOCKER_PREFIX} --license-variable "${env.LICENSE_VARIABLE}"
 """, returnStdout: true).trim()
         print(res)
         job_ids = sh(script: """./lava_helper/lava_submit_jobs.py \
-  --lava-url https://tf.validation.linaro.org --job-dir lava_jobs \
-  --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN}
+  --lava-url ${env.LAVA_URL} --job-dir lava_jobs \
+  --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} | egrep "^JOBS"
   """, returnStdout: true).trim()
         currentBuild.setDescription(job_ids)
       }
diff --git a/jenkins/static.jpl b/jenkins/static.jpl
index 3f75476..dccdd09 100644
--- a/jenkins/static.jpl
+++ b/jenkins/static.jpl
@@ -22,12 +22,23 @@
   }
 }
 
+def status = 1
+
 stage("Static Checks") {
   def checks = [:]
   checks["cppcheck"] = trigger("tf-m-cppcheck")
   checks["checkpatch"] = trigger("tf-m-checkpatch")
-  parallel(checks)
+  try {
+    parallel(checks)
+  } catch (Exception e) {
+    status = -1
+    echo "Failed static checks, continuing with build."
+  }
 }
 stage("Trigger Build") {
   parallel(["build":trigger("tf-m-build-and-test")])
+  // If previously failed at static checks, mark this as a failure
+  if (status < 0 ) {
+    error("Failing due to failed static checks.")
+  }
 }
diff --git a/lava_helper/jinja2_templates/base.jinja2 b/lava_helper/jinja2_templates/base.jinja2
new file mode 100644
index 0000000..22c9d6b
--- /dev/null
+++ b/lava_helper/jinja2_templates/base.jinja2
@@ -0,0 +1,42 @@
+{#------------------------------------------------------------------------------
+# Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#-----------------------------------------------------------------------------#}
+{%- block metadata %}
+metadata:
+  build_no: {{ build_no }}
+  compiler: {{ compiler }}
+  build_type: {{ build_type }}
+  boot_type: {{ boot_type }}
+  name: {{ name }}
+  platform: {{ platform }}
+  build_name: {{ build_name }}
+  recovery_image_url: {{ recovery_image_url }}
+  firmware_url: {{ firmware_url }}
+  bootloader_url: {{ bootloader_url }}
+  build_job_url: {{ build_job_url }}
+{%- endblock %}
+
+{%- block base %}
+device_type: {{ device_type }}
+job_name: {{ job_name }}
+
+timeouts:
+  job:
+    minutes: {{ job_timeout }}
+  action:
+    minutes: {{ action_timeout }}
+  actions:
+    power-off:
+      minutes: {{ poweroff_timeout }}
+  connections:
+    lava-test-monitor:
+      minutes: {{ monitor_timeout }}
+
+priority: medium
+visibility: public
+{% endblock %}
+{% block actions %}
+{% endblock %}
diff --git a/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2 b/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2
index aec5bd2..c90a0e1 100644
--- a/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2
+++ b/lava_helper/jinja2_templates/template_tfm_mps2_fvp.jinja2
@@ -26,22 +26,23 @@
         url: {{ bootloader_url }}
 
 - boot:
+    failure_retry: 3
     namespace: docker
     method: fvp
     docker:
-      name: 'replace_docker_prefix/lava-fvp-mps2'
+      name: {{ docker_prefix }}/lava-fvp-mps2
     prompts:
     - 'root@lava '
     image: /opt/model/FVP_MPS2_AEMv8M
     timeout:
       minutes: 5
     console_string: 'telnetterminal0: Listening for serial connection on port (?P<PORT>\d+)'
-    license_variable: 'replace_licence_variable'
+    license_variable: '{{ license_variable }}'
     arguments:
-    -  "--application cpu0={S}"
-    -  "--data cpu0={NS}@0x00100000"
-    -  "--simlimit 1200"
+    -  "--application cpu0={NS}"
+    -  "--data cpu0={S}@{{ data_bin_offset }}"
     -  "--parameter fvp_mps2.platform_type=2"
+    -  "--parameter cpu0.baseline=0"
     -  "--parameter cpu0.INITVTOR_S=0x10000000"
     -  "--parameter cpu0.semihosting-enable=0"
     -  "--parameter fvp_mps2.DISABLE_GATING=0"
@@ -51,27 +52,21 @@
     -  "--parameter fvp_mps2.telnetterminal0.quiet=0"
     -  "--parameter fvp_mps2.telnetterminal1.quiet=0"
     -  "--parameter fvp_mps2.telnetterminal2.quiet=0"
-    -  "--parameter fvp_mps2.UART0.unbuffered_output=1"
     -  "--parameter fvp_mps2.UART0.shutdown_on_eot=1"
-    -  "--parameter fvp_mps2.UART1.unbuffered_output=1"
-    -  "--parameter fvp_mps2.UART1.shutdown_on_eot=1"
-    -  "--parameter fvp_mps2.UART2.unbuffered_output=1"
-    -  "--parameter fvp_mps2.UART2.shutdown_on_eot=1"
     -  "--parameter fvp_mps2.mps2_visualisation.disable-visualisation=1"
-    -  "--parameter cpu0.baseline=1"
     prompts:
-    - 'Jumping to non-secure code'
+    - '(.*)'
 
 - test:
     namespace: target
     monitors:
     {%- for monitor in test.monitors %}
-    - name: "{{monitor.name}}_{{ platform }}_{{ compiler }}_{{ name }}_{{ build_type }}_{{ boot_type }}"
+    - name: "{{monitor.name}}"
       start: "{{monitor.start}}"
       end: "{{monitor.end}}"
       pattern: "{{monitor.pattern}}"
       fixupdict:
          '{{monitor.fixup.pass}}': pass
          '{{monitor.fixup.fail}}': fail
-    {% endfor %}
+    {%- endfor %}
 {% endblock %}
diff --git a/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2 b/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
index 6aefc55..870c95f 100644
--- a/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
+++ b/lava_helper/jinja2_templates/template_tfm_mps2_sse_200.jinja2
@@ -4,63 +4,44 @@
 # SPDX-License-Identifier: BSD-3-Clause
 #
 #-----------------------------------------------------------------------------#}
-device_type: {{ device_type }}
-job_name: {{ job_name }}
-
-timeouts:
-  job:
-    minutes: {{ job_timeout }}
-  action:
-    minutes: {{ action_timeout }}
-  actions:
-    power-off:
-      minutes: {{ poweroff_timeout }}
-  connections:
-    lava-test-monitor:
-      minutes: {{ monitor_timeout }}
-
-priority: medium
-visibility: public
-
+{% extends 'jinja2_templates/base.jinja2' %}
+{% block metadata %}
+{{ super() }}
+{% endblock %}
+{% block base %}
+{{ super() }}
+{% endblock %}
+{% block actions %}
 actions:
-{%- for platform, recovery in platforms.items()|sort(reverse=false) %}
-  {% for compiler in compilers|sort(reverse=true) %}
-    {%- for build_type in build_types|sort(reverse=false) %}
-      {%- for boot_type in boot_types|sort(reverse=false) %}
-        {%- for name, test in tests.items()|sort(reverse=false) %}
-    - deploy:
-        to: flasher
-        images:
-          recovery_image:
-            url: {{ recovery_store_url }}/{{ build_no }}/artifact/{{ recovery }}
-            compression: gz
-          test_binary_1:
-            url: {{artifact_store_url}}/{{ build_no }}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/install/outputs/{{ platform }}/{{ test.binaries.firmware }}
-          test_binary_2:
-            url: {{artifact_store_url}}/{{ build_no }}/artifact/build-ci-all/{{ platform }}_{{ compiler }}_Config{{ name }}_{{ build_type }}_{{ boot_type }}/install/outputs/{{ platform }}/{{ test.binaries.bootloader }}
-        namespace: target
+- deploy:
+    to: flasher
+    images:
+      recovery_image:
+        url: {{ recovery_image_url }}
+        compression: gz
+    namespace: target
+    test_binary_1:
+      url: {{ firmware_url}}
+    test_binary_2:
+      url: {{ bootloader_url }}
+    namespace: target
 
-    - boot:
-        method: minimal
-        timeout:
-          minutes: 10
-        namespace: target
+- boot:
+    method: minimal
+    timeout:
+      minutes: 10
+    namespace: target
 
-    - test:
-        namespace: target
-        monitors:
-        {%- for monitor in test.monitors %}
-        - name: "{{monitor.name}}_{{ platform }}_{{ compiler }}_{{ name }}_{{ build_type }}_{{ boot_type }}"
-          start: "{{monitor.start}}"
-          end: "{{monitor.end}}"
-          pattern: "{{monitor.pattern}}"
-          fixupdict:
-             '{{monitor.fixup.pass}}': pass
-             '{{monitor.fixup.fail}}': fail
-
-        {%- endfor %}
-        {%- endfor %}
-      {%- endfor %}
-    {%- endfor %}
-  {%- endfor %}
-{%- endfor %}
+- test:
+    namespace: target
+    monitors:
+    {%- for monitor in test.monitors %}
+    - name: "{{monitor.name}}"
+      start: "{{monitor.start}}"
+      end: "{{monitor.end}}"
+      pattern: "{{monitor.pattern}}"
+      fixupdict:
+         '{{monitor.fixup.pass}}': pass
+         '{{monitor.fixup.fail}}': fail
+    {% endfor %}
+{% endblock %}
diff --git a/lava_helper/jinja2_templates/test_summary.jinja2 b/lava_helper/jinja2_templates/test_summary.jinja2
new file mode 100644
index 0000000..5c0ebb4
--- /dev/null
+++ b/lava_helper/jinja2_templates/test_summary.jinja2
@@ -0,0 +1,49 @@
+<html>
+<head>
+  <title>TF Test Summary</title>
+  <meta charset="UTF-8">
+</head>
+<body>
+
+{% for job in jobs %}
+{% for job_id, data in job.items() %}
+<h3>
+{% if data[0]['health'] == 'Complete' %}
+<font color="green">
+{% else %}
+<font color="red">
+{% endif %}
+Job: {{ job_id }}<br/>
+Description: {{ data[0]['description'] }}<br/>
+Device Type: {{ data[0]['device_type']}}  Health: {{ data[0]['health'] }}  <a href="{{ data[0]['metadata']['build_job_url'] }}">Build Job</a></br>
+<a href="{{ data[0]['artifacts_dir'] }}/definition.yaml">LAVA Definition</a>  <a href="{{ data[0]['lava_url'] }}">LAVA Job</a>  <a href="{{ data[0]['artifacts_dir'] }}/target_log.txt">Target Log</a></br>
+</font>
+</h3>
+{% if data[1] %}
+<h3>
+<table>
+<tr>
+<th>Name</th>
+<th>Suite</th>
+<th>Result</th>
+</tr>
+{% for result in data[1] %}
+<tr>
+<td>{{ result['name'] }} </td>
+<td>{{ result['suite'] }} </td>
+{% if result['result'] == 'pass' %}
+<td style="background-color:green">
+{% else %}
+<td style="background-color:red">
+{% endif %}
+{{ result['result'] }}</td>
+</tr>
+{% endfor %}
+</table>
+</h3>
+{% endif %}
+{% endfor %}
+{% endfor %}
+
+</body>
+</html>
diff --git a/lava_helper/jinja2_templates/test_summary_csv.jinja2 b/lava_helper/jinja2_templates/test_summary_csv.jinja2
new file mode 100644
index 0000000..b7aa4ae
--- /dev/null
+++ b/lava_helper/jinja2_templates/test_summary_csv.jinja2
@@ -0,0 +1,12 @@
+LAVA_JOB_ID,LAVA_HEALTH,LAVA_DEVICE_TYPE,BUILD_FULL_NAME,BUILD_NUMBER,COMPILER,BUILD_TYPE,BOOT_TYPE,PLATFORM,RESULT_SUITE,RESULT_NAME,RESULT
+{%- for job in jobs %}
+{%- for job_id, data in job.items() -%}
+{%- if data[1] %}
+{%- for result in data[1] %}
+{{ job_id }},{{ data[0]['health']}},{{ data[0]['device_type']}},{{ data[0]['metadata']['build_name'] }},{{ data[0]['metadata']['build_no'] }},{{ data[0]['metadata']['compiler'] }},{{ data[0]['metadata']['build_type'] }},{{ data[0]['metadata']['boot_type'] }},{{ data[0]['metadata']['platform'] }},{{ result['suite'] }},{{ result['name'] }},{{ result['result'] }}
+{%- endfor %}
+{%- else %}
+{{ job_id }},{{ data[0]['health']}},{{ data[0]['device_type']}},{{ data[0]['metadata']['build_name'] }},{{ data[0]['metadata']['build_no'] }},{{ data[0]['metadata']['compiler'] }},{{ data[0]['metadata']['build_type'] }},{{ data[0]['metadata']['boot_type'] }},{{ data[0]['metadata']['platform'] }},,,
+{%- endif %}
+{%- endfor %}
+{%- endfor %}
diff --git a/lava_helper/lava_create_jobs.py b/lava_helper/lava_create_jobs.py
new file mode 100755
index 0000000..24bf487
--- /dev/null
+++ b/lava_helper/lava_create_jobs.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+"""
+Script to create LAVA definitions from a single tf-m-build-config
+jenkins Job.
+"""
+
+import os
+import sys
+import shutil
+import argparse
+from copy import deepcopy
+from collections import OrderedDict
+from jinja2 import Environment, FileSystemLoader
+from lava_helper_configs import *
+
+try:
+    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
+except ImportError:
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    sys.path.append(os.path.join(dir_path, "../"))
+    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
+
+
+def load_config_overrides(user_args, config_key):
+    """ Load a configuration from multiple locations and override it with
+    user provided arguments """
+
+    print("Using built-in config: %s" % config_key)
+    try:
+        config = lava_gen_config_map[config_key]
+    except KeyError:
+        print("No template found for config: %s" % config_key)
+        sys.exit(1)
+
+    config["build_no"] = user_args.build_no
+    config["recovery_store_url"] = user_args.jenkins_build_url
+    config["artifact_store_url"] = config["recovery_store_url"]
+
+    #  Add the template folder
+    config["templ"] = os.path.join(user_args.template_dir, config["templ"])
+    return config
+
+
+def get_artifact_url(artifact_store_url, params, filename):
+    platform = params['platform']
+    if params['device_type'] == 'fvp':
+        platform = 'fvp'
+    return "{}/artifact/trusted-firmware-m/build/install/outputs/{}/{}".format(
+        artifact_store_url.rstrip('/'), platform, filename,
+    )
+
+
+def get_recovery_url(recovery_store_url, recovery):
+    return "{}/artifact/{}".format(recovery_store_url.rstrip('/'), recovery)
+
+
+def get_job_name(name, params, job):
+    return "{}_{}_{}_{}_{}_{}_{}_{}".format(
+        name,
+        job,
+        params["platform"],
+        params["build_no"],
+        params["compiler"],
+        params["build_type"],
+        params["boot_type"],
+        params["name"],
+    )
+
+
+def get_build_name(params):
+    return "{}_{}_{}_{}_{}".format(
+        params["platform"],
+        params["compiler"],
+        params["name"],
+        params["build_type"],
+        params["boot_type"],
+    )
+
+
+def generate_test_definitions(config, work_dir, user_args):
+    """ Get a dictionary configuration, and an existing jinja2 template
+    and generate a LAVA compatible yaml definition """
+
+    template_loader = FileSystemLoader(searchpath=work_dir)
+    template_env = Environment(loader=template_loader)
+    recovery_store_url = user_args.jenkins_build_url
+    build_no = user_args.build_no
+    artifact_store_url = recovery_store_url
+    template_file = config.pop("templ")
+
+    definitions = {}
+
+    for platform, recovery in config["platforms"].items():
+        if platform != user_args.platform:
+            continue
+        recovery_image_url = get_recovery_url(recovery_store_url, recovery)
+        for compiler in config["compilers"]:
+            if compiler != user_args.compiler:
+                continue
+            for build_type in config["build_types"]:
+                if build_type != user_args.build_type:
+                    continue
+                for boot_type in config["boot_types"]:
+                    bl2_string = "BL2" if user_args.bl2 else "NOBL2"
+                    if boot_type != bl2_string:
+                        continue
+                    for test_name, test_dict in config["tests"].items():
+                        if "Config{}".format(test_name) != user_args.proj_config:
+                            continue
+                        params = {
+                            "device_type": config["device_type"],
+                            "job_timeout": config["job_timeout"],
+                            "action_timeout": config["action_timeout"],
+                            "monitor_timeout": config["monitor_timeout"],
+                            "poweroff_timeout": config["poweroff_timeout"],
+                            "compiler": compiler,
+                            "build_type": build_type,
+                            "build_no": build_no,
+                            "boot_type": boot_type,
+                            "name": test_name,
+                            "test": test_dict,
+                            "platform": platform,
+                            "recovery_image_url": recovery_image_url,
+                            "data_bin_offset": config.get('data_bin_offset', ''),
+                            "docker_prefix": vars(user_args).get('docker_prefix', ''),
+                            "license_variable": vars(user_args).get('license_variable', ''),
+                            "build_job_url": artifact_store_url,
+                        }
+                        params.update(
+                            {
+                                "firmware_url": get_artifact_url(
+                                    artifact_store_url,
+                                    params,
+                                    test_dict["binaries"]["firmware"],
+                                ),
+                                "bootloader_url": get_artifact_url(
+                                    artifact_store_url,
+                                    params,
+                                    test_dict["binaries"]["bootloader"],
+                                ),
+                            }
+                        )
+                        params.update(
+                            {
+                                "job_name": get_job_name(
+                                    config["job_name"], params, user_args.jenkins_job,
+                                ),
+                                "build_name": get_build_name(params)
+                            }
+                        )
+
+                        definition = template_env.get_template(template_file).render(
+                            params
+                        )
+                        definitions.update({params["job_name"]: definition})
+    return definitions
+
+
+def generate_lava_job_defs(user_args, config):
+    """ Create a LAVA test job definition file """
+
+    # Evaluate current directory
+    work_dir = os.path.abspath(os.path.dirname(__file__))
+
+    # If a single platform is requested and it exists in the platform
+    if user_args.platform and user_args.platform in config["platforms"]:
+        # Only test this platform
+        platform = user_args.platform
+        config["platforms"] = {platform: config["platforms"][platform]}
+
+    # Generate the output definition
+    definitions = generate_test_definitions(config, work_dir, user_args)
+
+    # Write it into a file
+    out_dir = os.path.abspath(user_args.lava_def_output)
+    os.makedirs(out_dir, exist_ok=True)
+    for name, definition in definitions.items():
+        out_file = os.path.join(out_dir, "{}{}".format(name, ".yaml"))
+        with open(out_file, "w") as F:
+            F.write(definition)
+        print("Definition created at %s" % out_file)
+
+
+def main(user_args):
+    user_args.template_dir = "jinja2_templates"
+    config_keys = lava_gen_config_map.keys()
+    if user_args.config_key:
+        config_keys = [user_args.config_key]
+    for config_key in config_keys:
+        config = load_config_overrides(user_args, config_key)
+        generate_lava_job_defs(user_args, config)
+
+
+def get_cmd_args():
+    """ Parse command line arguments """
+
+    # Parse command line arguments to override config
+    parser = argparse.ArgumentParser(description="Lava Create Jobs")
+    cmdargs = parser.add_argument_group("Create LAVA Jobs")
+
+    # Configuration control
+    cmdargs.add_argument(
+        "--config-name",
+        dest="config_key",
+        action="store",
+        help="Select built-in configuration by name",
+    )
+    cmdargs.add_argument(
+        "--build-number",
+        dest="build_no",
+        action="store",
+        default="lastSuccessfulBuild",
+        help="JENKINS Build number selector. " "Default: lastSuccessfulBuild",
+    )
+    cmdargs.add_argument(
+        "--output-dir",
+        dest="lava_def_output",
+        action="store",
+        default="job_results",
+        help="Set LAVA compatible .yaml output file",
+    )
+    cmdargs.add_argument(
+        "--platform",
+        dest="platform",
+        action="store",
+        help="Override platform.Only the provided one " "will be tested",
+    )
+    cmdargs.add_argument(
+        "--compiler",
+        dest="compiler",
+        action="store",
+        help="Compiler to build definitions for",
+    )
+    cmdargs.add_argument(
+        "--jenkins-build-url",
+        dest="jenkins_build_url",
+        action="store",
+        help="Set the Jenkins URL",
+    )
+    cmdargs.add_argument(
+        "--jenkins-job",
+        dest="jenkins_job",
+        action="store",
+        default="tf-m-build-config",
+        help="Set the jenkins job name",
+    )
+    cmdargs.add_argument(
+        "--proj-config", dest="proj_config", action="store", help="Proj config"
+    )
+    cmdargs.add_argument(
+        "--build-type", dest="build_type", action="store", help="Build type"
+    )
+    cmdargs.add_argument(
+        "--docker-prefix", dest="docker_prefix", action="store", help="Prefix string for the FVP docker registry location"
+    )
+    cmdargs.add_argument(
+        "--license-variable", dest="license_variable", action="store", help="License string for Fastmodels"
+    )
+    cmdargs.add_argument("--bl2", dest="bl2", action="store_true", help="BL2")
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    main(get_cmd_args())
diff --git a/lava_helper/lava_helper.py b/lava_helper/lava_helper.py
index 737626d..40665ad 100755
--- a/lava_helper/lava_helper.py
+++ b/lava_helper/lava_helper.py
@@ -2,7 +2,7 @@
 
 """ lava_helper.py:
 
-    Generate custom defined LAVA definitions redered from Jinja2 templates.
+    Generate custom defined LAVA definitions rendered from Jinja2 templates.
     It can also parse the yaml output of LAVA and verify the test outcome """
 
 from __future__ import print_function
@@ -23,6 +23,7 @@
 
 import os
 import sys
+import shutil
 import argparse
 from copy import deepcopy
 from collections import OrderedDict
@@ -75,20 +76,101 @@
         print("\t * %s" % k)
 
 
-def generate_test_definitions(config, work_dir):
+def get_artifact_url(artifact_store_url, params, filename):
+    return "{}/{}/artifact/build-ci-all/{}_{}_Config{}_{}_{}/install/outputs/{}/{}".format(
+        artifact_store_url,
+        params['build_no'],
+        params['platform'],
+        params['compiler'],
+        params['name'],
+        params['build_type'],
+        params['boot_type'],
+        params['platform'],
+        filename,
+    )
+
+def get_recovery_url(recovery_store_url, build_no, recovery):
+    return("{}/{}/artifact/{}".format(
+        recovery_store_url,
+        build_no,
+        recovery
+    ))
+
+def get_job_name(name, params, job):
+    return("{}_{}_{}_{}_{}_{}_{}_{}".format(
+        name,
+        job,
+        params['platform'],
+        params['build_no'],
+        params['compiler'],
+        params['build_type'],
+        params['boot_type'],
+        params['name'],
+        ))
+
+def generate_test_definitions(config, work_dir, user_args):
     """ Get a dictionary configuration, and an existing jinja2 template
     and generate a LAVA compatbile yaml definition """
 
     template_loader = FileSystemLoader(searchpath=work_dir)
     template_env = Environment(loader=template_loader)
-
-    # Ensure that the jinja2 template is always rendered the same way
-    config = sort_lavagen_config(config)
-
+    recovery_store_url = "{}/job/{}".format(
+        user_args.jenkins_url,
+        user_args.jenkins_job
+    )
+    build_no = user_args.build_no
+    artifact_store_url = recovery_store_url
     template_file = config.pop("templ")
 
-    definition = template_env.get_template(template_file).render(**config)
-    return definition
+    definitions = {}
+
+    for platform, recovery in config['platforms'].items():
+        recovery_image_url = get_recovery_url(
+            recovery_store_url,
+            build_no,
+            recovery)
+        for compiler in config['compilers']:
+            for build_type in config['build_types']:
+                for boot_type in config['boot_types']:
+                    for test_name, test_dict in config['tests'].items():
+                        params = {
+                            "device_type": config['device_type'],
+                            "job_timeout": config['job_timeout'],
+                            "action_timeout": config['action_timeout'],
+                            "monitor_timeout": config['monitor_timeout'],
+                            "poweroff_timeout": config['poweroff_timeout'],
+                            'compiler': compiler,
+                            'build_type': build_type,
+                            'build_no': build_no,
+                            'boot_type': boot_type,
+                            'name': test_name,
+                            'test': test_dict,
+                            'platform': platform,
+                            'recovery_image_url': recovery_image_url,
+                            }
+                        params.update({
+                            'firmware_url': get_artifact_url(
+                                artifact_store_url,
+                                params,
+                                test_dict['binaries']['firmware']
+                                ),
+                            'bootloader_url': get_artifact_url(
+                                artifact_store_url,
+                                params,
+                                test_dict['binaries']['bootloader']
+                                )
+                        })
+                        params.update({
+                            'job_name': get_job_name(
+                                config['job_name'],
+                                params,
+                                user_args.jenkins_job,
+                                )
+                        })
+
+                        definition = template_env.get_template(template_file).render(params)
+                        definitions.update({params['job_name']: definition})
+    return definitions
 
 
 def generate_lava_job_defs(user_args, config):
@@ -107,14 +189,18 @@
         config["platforms"] = {platform: config["platforms"][platform]}
 
     # Generate the ouptut definition
-    definition = generate_test_definitions(config, work_dir)
+    definitions = generate_test_definitions(config, work_dir, user_args)
 
     # Write it into a file
-    out_file = os.path.abspath(user_args.lava_def_output)
-    with open(out_file, "w") as F:
-        F.write(definition)
-
-    print("Definition created at %s" % out_file)
+    out_dir = os.path.abspath(user_args.lava_def_output)
+    if os.path.exists(out_dir):
+        shutil.rmtree(out_dir)
+    os.makedirs(out_dir)
+    for name, definition in definitions.items():
+        out_file = os.path.join(out_dir, "{}{}".format(name, ".yaml"))
+        with open(out_file, "w") as F:
+            F.write(definition)
+        print("Definition created at %s" % out_file)
 
 
 def test_map_from_config(lvg_cfg=tfm_mps2_sse_200):
@@ -409,10 +495,10 @@
                        default="lastSuccessfulBuild",
                        help="JENKINGS Build number selector. "
                             "Default: lastSuccessfulBuild")
-    def_g.add_argument("-co", "--create-definition-output-file",
+    def_g.add_argument("-co", "--create-definition-output-dir",
                        dest="lava_def_output",
                        action="store",
-                       default="job_results.yaml",
+                       default="job_results",
                        help="Set LAVA compatible .yaml output file")
 
     # Parameter override commands
diff --git a/lava_helper/lava_helper_configs.py b/lava_helper/lava_helper_configs.py
index 888b0f9..86181dd 100644
--- a/lava_helper/lava_helper_configs.py
+++ b/lava_helper/lava_helper_configs.py
@@ -71,7 +71,7 @@
                 {
                     'name': 'Secure_Test_Suites_Summary',
                     'start': '[Sec Thread]',
-                    'end': '\\x1b\\\[0m',
+                    'end': 'system starting',
                     'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
                                r'(?P<test_case_id>Secure image '
                                r'initializing)(?P<result>!)',
@@ -142,7 +142,7 @@
                 {
                     'name': 'Secure_Test_Suites_Summary',
                     'start': '[Sec Thread]',
-                    'end': '\\x1b\\\[0m',
+                    'end': 'system starting',
                     'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
                                r'(?P<test_case_id>Secure image '
                                r'initializing)(?P<result>!)',
@@ -173,22 +173,152 @@
 }
 
 
-tfm_mps2_fvp = {
+tfm_mps2_fvp_bl2 = {
     "templ": "template_tfm_mps2_fvp.jinja2",
-    "job_name": "mps-fvp",
+    "job_name": "mps2plus-arm-tfm-fvp",
     "device_type": "fvp",
-    "job_timeout": 180,
-    "action_timeout": 90,
-    "monitor_timeout": 90,
-    "poweroff_timeout": 5,
+    "job_timeout": 5,
+    "action_timeout": 2,
+    "monitor_timeout": 2,
+    "poweroff_timeout": 1,
     "recovery_store_url": "%(jenkins_url)s/"
                           "job/%(jenkins_job)s",
     "artifact_store_url": "%(jenkins_url)s/"
                           "job/%(jenkins_job)s",
-    "platforms": {"AN519": "mps2_an521_v3.0.tar.gz"},
-    "compilers": ["GNUARM"],
+    "platforms": {"AN521": "mps2_an521_v3.0.tar.gz"},
+    "compilers": ["GNUARM", "ARMCLANG"],
     "build_types": ["Debug", "Release"],
     "boot_types": ["BL2"],
+    "data_bin_offset": "0x10080000",
+    "tests": {
+        'Default': {
+            "binaries": {
+                "firmware": "mcuboot.axf",
+                "bootloader": "tfm_s_ns_signed.bin"
+            },
+            "monitors": [
+                {
+                    'name': 'Secure_Test_Suites_Summary',
+                    'start': r'[Sec Thread]',
+                    'end': r'system starting',
+                    'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+                               r'(?P<test_case_id>Secure image '
+                               r'initializing)(?P<result>!)',
+                    'fixup': {"pass": "!", "fail": ""},
+                    'required': ["secure_image_initializing"]
+                } # Monitors
+            ]
+        },  # Default
+        'Regression': {
+            "binaries": {
+                "firmware": "mcuboot.axf",
+                "bootloader": "tfm_s_ns_signed.bin"
+            },
+            "monitors": [
+                {
+                    'name': 'Secure_Test_Suites_Summary',
+                    'start': 'Secure test suites summary',
+                    'end': 'End of Secure test suites',
+                    'pattern': r"[\x1b]\\[37mTest suite '(?P<"
+                               r"test_case_id>[^\n]+)' has [\x1b]\\[32m "
+                               r"(?P<result>PASSED|FAILED)",
+                    'fixup': {"pass": "PASSED", "fail": "FAILED"},
+                    'required': [
+                        ("psa_protected_storage_"
+                           "s_interface_tests_tfm_sst_test_2xxx_"),
+                        "sst_reliability_tests_tfm_sst_test_3xxx_",
+                        "sst_rollback_protection_tests_tfm_sst_test_4xxx_",
+                        ("psa_internal_trusted_storage_"
+                           "s_interface_tests_tfm_its_test_2xxx_"),
+                        "its_reliability_tests_tfm_its_test_3xxx_",
+                        ("audit_"
+                         "logging_secure_interface_test_tfm_audit_test_1xxx_"),
+                        "crypto_secure_interface_tests_tfm_crypto_test_5xxx_",
+                        ("initial_attestation_service_"
+                         "secure_interface_tests_tfm_attest_test_1xxx_"),
+                    ]
+                },
+                {
+                    'name': 'Non_Secure_Test_Suites_Summary',
+                    'start': 'Non-secure test suites summary',
+                    'end': r'End of Non-secure test suites',
+                    'pattern': r"[\x1b]\\[37mTest suite '(?P"
+                               r"<test_case_id>[^\n]+)' has [\x1b]\\[32m "
+                               r"(?P<result>PASSED|FAILED)",
+                    'fixup': {"pass": "PASSED", "fail": "FAILED"},
+                    'required': [
+                        ("psa_protected_storage"
+                         "_ns_interface_tests_tfm_sst_test_1xxx_"),
+                        ("psa_internal_trusted_storage"
+                         "_ns_interface_tests_tfm_its_test_1xxx_"),
+                        ("auditlog_"
+                         "non_secure_interface_test_tfm_audit_test_1xxx_"),
+                        ("crypto_"
+                         "non_secure_interface_test_tfm_crypto_test_6xxx_"),
+                        ("initial_attestation_service_"
+                         "non_secure_interface_tests_tfm_attest_test_2xxx_"),
+                        "core_non_secure_positive_tests_tfm_core_test_1xxx_"
+                    ]
+                }
+            ]  # Monitors
+        },  # Regression
+        'CoreIPC': {
+            "binaries": {
+                "firmware": "mcuboot.axf",
+                "bootloader": "tfm_s_ns_signed.bin"
+            },
+            "monitors": [
+                {
+                    'name': 'Secure_Test_Suites_Summary',
+                    'start': r'[Sec Thread]',
+                    'end': r'system starting',
+                    'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+                               r'(?P<test_case_id>Secure image '
+                               r'initializing)(?P<result>!)',
+                    'fixup': {"pass": "!", "fail": ""},
+                    'required': ["secure_image_initializing"]
+                }  # Monitors
+            ]
+        },  # CoreIPC
+        'CoreIPCTfmLevel2': {
+            "binaries": {
+                "firmware": "mcuboot.axf",
+                "bootloader": "tfm_s_ns_signed.bin"
+            },
+            "monitors": [
+                {
+                    'name': 'Secure_Test_Suites_Summary',
+                    'start': r'[Sec Thread]',
+                    'end': r'system starting',
+                    'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
+                               r'(?P<test_case_id>Secure image '
+                               r'initializing)(?P<result>!)',
+                    'fixup': {"pass": "!", "fail": ""},
+                    'required': ["secure_image_initializing"]
+                }  # Monitors
+            ]
+        },  # CoreIPCTfmLevel2
+    }  # Tests
+}
+
+
+tfm_mps2_fvp_nobl2 = {
+    "templ": "template_tfm_mps2_fvp.jinja2",
+    "job_name": "mps2plus-arm-tfm-fvp",
+    "device_type": "fvp",
+    "job_timeout": 5,
+    "action_timeout": 2,
+    "monitor_timeout": 2,
+    "poweroff_timeout": 1,
+    "recovery_store_url": "%(jenkins_url)s/"
+                          "job/%(jenkins_job)s",
+    "artifact_store_url": "%(jenkins_url)s/"
+                          "job/%(jenkins_job)s",
+    "platforms": {"AN521": "mps2_an521_v3.0.tar.gz"},
+    "compilers": ["GNUARM", "ARMCLANG"],
+    "build_types": ["Debug", "Release"],
+    "boot_types": ["NOBL2"],
+    "data_bin_offset": "0x00100000",
     "tests": {
         'Default': {
             "binaries": {
@@ -198,14 +328,14 @@
             "monitors": [
                 {
                     'name': 'Secure_Test_Suites_Summary',
-                    'start': '[Sec Thread]',
-                    'end': '\\x1b\\\[0m',
+                    'start': r'[Sec Thread]',
+                    'end': r'system starting',
                     'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
                                r'(?P<test_case_id>Secure image '
                                r'initializing)(?P<result>!)',
                     'fixup': {"pass": "!", "fail": ""},
                     'required': ["secure_image_initializing"]
-                }  # Monitors
+                }
             ]
         },  # Default
         'Regression': {
@@ -269,8 +399,8 @@
             "monitors": [
                 {
                     'name': 'Secure_Test_Suites_Summary',
-                    'start': '[Sec Thread]',
-                    'end': '\\x1b\\\[0m',
+                    'start': r'[Sec Thread]',
+                    'end': r'system starting',
                     'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
                                r'(?P<test_case_id>Secure image '
                                r'initializing)(?P<result>!)',
@@ -287,8 +417,8 @@
             "monitors": [
                 {
                     'name': 'Secure_Test_Suites_Summary',
-                    'start': '[Sec Thread]',
-                    'end': '\\x1b\\\[0m',
+                    'start': r'[Sec Thread]',
+                    'end': r'system starting',
                     'pattern': r'\x1b\\[1;34m\\[Sec Thread\\] '
                                r'(?P<test_case_id>Secure image '
                                r'initializing)(?P<result>!)',
@@ -302,7 +432,8 @@
 
 # All configurations should be mapped here
 lava_gen_config_map = {"tfm_mps2_sse_200": tfm_mps2_sse_200,
-                       "tfm_mps2_fvp": tfm_mps2_fvp}
+                       "tfm_mps2_fvp_bl2": tfm_mps2_fvp_bl2,
+                       "tfm_mps2_fvp_nobl2": tfm_mps2_fvp_nobl2}
 lavagen_config_sort_order = [
     "templ",
     "job_name",
diff --git a/lava_helper/lava_submit_jobs.py b/lava_helper/lava_submit_jobs.py
index b52bed1..b28700e 100755
--- a/lava_helper/lava_submit_jobs.py
+++ b/lava_helper/lava_submit_jobs.py
@@ -104,7 +104,7 @@
             print("Job submitted at: " + job_url)
             job_id_list.append(job_id)
 
-    print("\n".join(str(x) for x in job_id_list))
+    print("JOBS: {}".format(",".join(str(x) for x in job_id_list)))
 
 def main(user_args):
     lava_dispatch(user_args)
diff --git a/lava_helper/lava_wait_jobs.py b/lava_helper/lava_wait_jobs.py
new file mode 100755
index 0000000..9dbfbc8
--- /dev/null
+++ b/lava_helper/lava_wait_jobs.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+"""
+Script for waiting for LAVA jobs and parsing the results
+"""
+
+import os
+import sys
+import shutil
+import time
+import yaml
+import argparse
+import threading
+from copy import deepcopy
+from collections import OrderedDict
+from jinja2 import Environment, FileSystemLoader
+from lava_helper_configs import *
+from lava_helper import test_lava_dispatch_credentials
+
+try:
+    from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
+        load_yaml, test, print_test
+    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
+except ImportError:
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    sys.path.append(os.path.join(dir_path, "../"))
+    from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
+        load_yaml, test, print_test
+    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
+
+def wait_for_jobs(user_args):
+    job_list = user_args.job_ids.split(",")
+    job_list = [int(x) for x in job_list if x != '']
+    lava = test_lava_dispatch_credentials(user_args)
+    finished_jobs = lava.block_wait_for_jobs(job_list, user_args.dispatch_timeout, 0.5)
+    unfinished_jobs = [item for item in job_list if item not in finished_jobs]
+    for job in unfinished_jobs:
+        info_print("Cancelling unfinished job: {}".format(job))
+        lava.cancel_job(job)
+    if user_args.artifacts_path:
+        for job, info in finished_jobs.items():
+            info['job_dir'] = os.path.join(user_args.artifacts_path, "{}_{}".format(str(job), info['description']))
+            finished_jobs[job] = info
+    finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
+    print_lava_urls(finished_jobs, user_args)
+    boot_report(finished_jobs, user_args)
+    test_report(finished_jobs, user_args, lava)
+
+def fetch_artifacts(jobs, user_args, lava):
+    if not user_args.artifacts_path:
+        return
+    for job_id, info in jobs.items():
+        job_dir = info['job_dir']
+        info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
+        os.makedirs(job_dir, exist_ok=True)
+        def_path = os.path.join(job_dir, 'definition.yaml')
+        target_log = os.path.join(job_dir, 'target_log.txt')
+        config = os.path.join(job_dir, 'config.yaml')
+        definition, metadata = lava.get_job_definition(job_id, def_path)
+        jobs[job_id]['metadata'] = metadata
+        time.sleep(0.2) # be friendly to LAVA
+        lava.get_job_log(job_id, None, target_log)
+        time.sleep(0.2)
+        lava.get_job_config(job_id, config)
+        time.sleep(0.2)
+    return(jobs)
+
+
+def lava_id_to_url(id, user_args):
+    return "{}/scheduler/job/{}".format(user_args.lava_url, id)
+
+def boot_report(jobs, user_args):
+    incomplete_jobs = []
+    for job, info in jobs.items():
+        if info['health'] != 'Complete':
+            if info['error_reason'] == 'Infrastructure':
+                info_print("Job {} failed with Infrastructure error".format(job))
+            incomplete_jobs.append(job)
+    incomplete_output = [lava_id_to_url(x, user_args) for x in incomplete_jobs]
+    if len(incomplete_jobs) > 0:
+        print("BOOT_RESULT: -1 Failed: {}".format(incomplete_output))
+    else:
+        print("BOOT_RESULT: +1")
+
+def remove_lava_dupes(results):
+    for result in results:
+        if result['result'] != 'pass':
+            if result['suite'] == "lava":
+                for other in [x for x in results if x != result]:
+                    if other['name'] == result['name']:
+                        if other['result'] == 'pass':
+                            results.remove(result)
+    return(results)
+
+def test_report(jobs, user_args, lava):
+    # parsing of test results is WIP
+    fail_j = []
+    jinja_data = []
+    for job, info in jobs.items():
+        if user_args.artifacts_path:
+            results_file = os.path.join(info['job_dir'], 'results.yaml')
+            results = lava.get_job_results(job, results_file)
+        else:
+            results = lava.get_job_results(job)
+        results = yaml.load(results)
+        #results = remove_lava_dupes(results)
+        non_lava_results = [x for x in results if x['suite'] != 'lava']
+        info['lava_url'] = lava_id_to_url(job, user_args)
+        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
+        jinja_data.append({job: [info, non_lava_results]})
+        for result in non_lava_results:
+            if result['result'] != 'pass':
+                fail_j.append(job) if job not in fail_j else fail_j
+        time.sleep(0.5) # be friendly to LAVA
+    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
+    if len(fail_j) > 0:
+        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
+    else:
+        print("TEST_RESULT: +1")
+    data = {}
+    data['jobs'] = jinja_data
+    render_jinja(data)
+
+def render_jinja(data):
+    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "jinja2_templates")
+    template_loader = FileSystemLoader(searchpath=work_dir)
+    template_env = Environment(loader=template_loader)
+    html = template_env.get_template("test_summary.jinja2").render(data)
+    csv = template_env.get_template("test_summary_csv.jinja2").render(data)
+    with open('test_summary.html', "w") as F:
+        F.write(html)
+    with open('test_summary.csv', "w") as F:
+        F.write(csv)
+
+def print_lava_urls(jobs, user_args):
+    output = [lava_id_to_url(x, user_args) for x in jobs]
+    print("LAVA jobs triggered for this build: {}".format(output))
+
+
+def info_print(line):
+    print("INFO: {}".format(line))
+
+def main(user_args):
+    """ Main logic """
+    user_args.lava_rpc = "RPC2"
+    wait_for_jobs(user_args)
+
+def get_cmd_args():
+    """ Parse command line arguments """
+
+    # Parse command line arguments to override config
+    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
+    cmdargs = parser.add_argument_group("Lava Wait Jobs")
+
+    # Configuration control
+    cmdargs.add_argument(
+        "--lava-url", dest="lava_url", action="store", help="LAVA lab URL (without RPC2)"
+    )
+    cmdargs.add_argument(
+        "--job-ids", dest="job_ids", action="store", required=True, help="Comma separated list of job IDS"
+    )
+    cmdargs.add_argument(
+        "--lava-token", dest="token_secret", action="store", help="LAVA auth token"
+    )
+    cmdargs.add_argument(
+        "--lava-user", dest="token_usr", action="store", help="LAVA username"
+    )
+    cmdargs.add_argument(
+        "--use-env", dest="token_from_env", action="store_true", default=False, help="Use LAVA auth info from environment"
+    )
+    cmdargs.add_argument(
+        "--lava-timeout", dest="dispatch_timeout", action="store", type=int, default=3600, help="Time in seconds to wait for all jobs"
+    )
+    cmdargs.add_argument(
+        "--artifacts-path", dest="artifacts_path", action="store", help="Download LAVA artifacts to this directory"
+    )
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    main(get_cmd_args())
diff --git a/src/org/trustedfirmware/Gerrit.groovy b/src/org/trustedfirmware/Gerrit.groovy
index 7addf0d..732fe58 100644
--- a/src/org/trustedfirmware/Gerrit.groovy
+++ b/src/org/trustedfirmware/Gerrit.groovy
@@ -11,7 +11,7 @@
   node("docker-amd64-xenial") {
     cleanWs()
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
     verifyStatusInWorkspace(value, verify_name, category)
   }
@@ -38,7 +38,7 @@
   node("docker-amd64-xenial") {
     cleanWs()
     dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+      git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
     }
     commentInWorkspace(comment)
   }
diff --git a/src/org/trustedfirmware/Summary.groovy b/src/org/trustedfirmware/Summary.groovy
index bb5b16d..ec46775 100644
--- a/src/org/trustedfirmware/Summary.groovy
+++ b/src/org/trustedfirmware/Summary.groovy
@@ -20,7 +20,12 @@
     } else {
       bl2_string = 'NOBL2'
     }
-    row_string = "${params['TARGET_PLATFORM']}_${params['COMPILER']}_${params['CMAKE_BUILD_TYPE']}_${bl2_string}"
+    if (params["PSA_API_SUITE"].isEmpty()) {
+      psa_string = ""
+    } else {
+      psa_string = "_${params['PSA_API_SUITE']}"
+    }
+    row_string = "${params['TARGET_PLATFORM']}_${params['COMPILER']}_${params['CMAKE_BUILD_TYPE']}_${bl2_string}${psa_string}"
     column_string = "${params['PROJ_CONFIG']}"
     row = table[row_string]
     if (row == null) {
diff --git a/tfm_ci_pylib/lava_rpc_connector.py b/tfm_ci_pylib/lava_rpc_connector.py
index e7571db..6bd1493 100644
--- a/tfm_ci_pylib/lava_rpc_connector.py
+++ b/tfm_ci_pylib/lava_rpc_connector.py
@@ -23,6 +23,7 @@
 
 import xmlrpc.client
 import time
+import yaml
 
 
 class LAVA_RPC_connector(xmlrpc.client.ServerProxy, object):
@@ -74,6 +75,53 @@
                 F.write(results)
         return results
 
+    def get_job_definition(self, job_id, yaml_out_file=None):
+        job_def = self.scheduler.jobs.definition(job_id)
+        if yaml_out_file:
+            with open(yaml_out_file, "w") as F:
+                F.write(str(job_def))
+        def_o = yaml.load(job_def)
+        return job_def, def_o.get('metadata', [])
+
+    def write_target_lines(self, target_out_file, log):
+        log = yaml.load(str(log))
+        with open(target_out_file, "w+") as F:
+            for line in log:
+                if line['lvl'] in ['target', 'feedback']:
+                    F.write("{}\n".format(line['msg']))
+
+    def get_job_log(self, job_id, yaml_out_file=None, target_out_file=None):
+        job_res, job_log = self.scheduler.jobs.logs(job_id)
+        if yaml_out_file:
+            with open(yaml_out_file, "w") as F:
+                F.write(str(job_log))
+        if target_out_file:
+            self.write_target_lines(target_out_file, job_log)
+        return job_log
+
+    def get_job_config(self, job_id, yaml_out_file=None):
+        job_config = self.scheduler.jobs.configuration(job_id)
+        if yaml_out_file:
+            with open(yaml_out_file, "w") as F:
+                for data in job_config:
+                    if data:
+                        F.write(str(data))
+        return job_config
+
+    def get_job_info(self, job_id, yaml_out_file=None):
+        job_info = self.scheduler.jobs.show(job_id)
+        if yaml_out_file:
+            with open(yaml_out_file, "w") as F:
+                F.write(str(job_info))
+        return job_info
+
+    def get_error_reason(self, job_id):
+        lava_res = self.results.get_testsuite_results_yaml(job_id, 'lava')
+        results = yaml.load(lava_res)
+        for test in results:
+            if test['name'] == 'job':
+                return(test.get('metadata', {}).get('error_type', ''))
+
     def get_job_state(self, job_id):
         return self.scheduler.job_state(job_id)["job_state"]
 
@@ -147,6 +195,31 @@
                 break
         return self.scheduler.job_health(job_id)["job_health"]
 
+    def block_wait_for_jobs(self, job_ids, timeout, poll_freq=10):
+        """ Wait for multiple LAVA job ids to finish and return finished list """
+
+        start_t = int(time.time())
+        finished_jobs = {}
+        while(True):
+            cur_t = int(time.time())
+            if cur_t - start_t >= timeout:
+                print("Breaking because of timeout")
+                break
+            for job_id in job_ids:
+                # Check if the job is not running
+                cur_status = self.get_job_info(job_id)
+                # If in queue or running wait
+                if cur_status['state'] in ["Canceling","Finished"]:
+                    cur_status['error_reason'] = self.get_error_reason(job_id)
+                    finished_jobs[job_id] = cur_status
+                if len(job_ids) == len(finished_jobs):
+                    break
+                else:
+                    time.sleep(poll_freq)
+            if len(job_ids) == len(finished_jobs):
+                break
+        return finished_jobs
+
     def test_credentials(self):
         """ Attempt to querry the back-end and verify that the user provided
         authentication is valid """