Files removed:
pipelines/tf-m-build-test-review
pipelines/tf-m-dependencies-checkout
tf-m-build-test-nightly.yaml
tf-m-build-test-review.yaml
tf-m-dependencies-checkout.yaml
These five files are no longer in use and can be removed.
Change-Id: I361db25040d92ded9f2fa4f33adef066b5ad69b7
Signed-off-by: xinyu-tfm <xinyu.zhang@arm.com>
diff --git a/pipelines/tf-m-build-test-review b/pipelines/tf-m-build-test-review
deleted file mode 100644
index c6f33e0..0000000
--- a/pipelines/tf-m-build-test-review
+++ /dev/null
@@ -1,1198 +0,0 @@
-#!/usr/bin/env groovy
-
-/**
-* Copyright (c) 2018-2020 ARM Limited
-* SPDX-License-Identifier: BSD-3-Clause
-*
-* The following pipeline compiles, checks code-style and runs fastmodel
-* tests to each gerrit path on tracked branch (tfm master).
-*/
-
-/* Need to define a single status control variable for each stage */
-def checkout_nightlies_job_result = 'FAILURE'
-def checkout_tfm_job_result = 'FAILURE'
-def checkout_lava_runner_job_result = 'FAILURE'
-def checkout_ci_scripts_job_result = 'FAILURE'
-def cppcheck_job_result = 'FAILURE'
-def checkpath_job_result = 'FAILURE'
-def build_job_result = 'FAILURE'
-def build_docs_result = 'FAILURE'
-def artifact_job_result = 'FAILURE'
-def fastmodel_job_result = 'FAILURE'
-def lava_job_result = 'FAILURE'
-def timeout_def_stage = 60
-def timeout_build_stage = 180
-def timeout_test_stage = 180
-/* Variable to store the stage that failed */
-def failed_stage = ''
-
-/* Define docker images used in pipeline */
-def docker_img_orchestrator = 'docker-amd64-bionic'
-def docker_img_builder = 'docker-amd64-bionic'
-
-pipeline {
- agent {
- node {
- label docker_img_orchestrator
- }
- }
- environment {
-
- /* Any paths added here be appened to ENV.PATH */
- EXTRA_PATHS = ('')
-
- /* Trusted Firmware checkout directory */
- TFM_DIR = "tf-m"
-
- /* Trusted Firmware CI Script checkout directory */
- TFM_CI_SCR_DIR = "tfm-ci-scripts"
-
- /* Virtual Evnrioment for Python2/3 name */
- VENV_P2_NAME = "tfm-openci-python2-venv"
- VENV_P3_NAME = "tfm-openci-python3-venv"
-
- /* Check-patch related intermediate text file */
- CKPATCH_DETAILS_FILE = "checkpatch_details.txt"
-
- /* Check-patch ouput log file */
- CKPATCH_SUMMARY_FILE = "checkpatch_summary.json"
-
- /* CPP-check ouput log file */
- CPPCHCK_SUMMARY_FILE = "cppchk_summary.json"
-
- /* File that captures information about checked-out dependencies */
- CI_SCRIPTS_INFO_FILE = "openci_scripts_git_info.json"
-
- /* Git information log file */
- GIT_INFO_FILE = "tfm_git_info.json"
-
- /* Build Wrapper output log file */
- BUILD_SUMMARY_FILE = "build_summary.json"
-
- /* Fast model testing output log file */
- FPV_SUMMARY_FILE = "fvp_test_summary.json"
-
- /* Intermediate lava job definition file (passed to lava dispatcher)*/
- LAVA_TEST_DEFINITION = "arm_cm3ds_mps2_gcc_arm.yaml"
-
- /* Filename for results file provided by LAVA after testing */
- LAVA_JOB_RESULTS = "lava_job_results.yaml"
-
- /* lava output log file */
- LAVA_SUMMARY_FILE = "lava_summary.json"
-
- /* Time the dispatcher will wait before cancelling a job */
- LAVA_DISPATCHER_TIMEOUT_MINS = timeout_test_stage.toString()
-
- /* Fast model testing output log file */
- JENKINS_ENV_INFO_FILE = "jenviroment.json"
-
- /* Directory where build artifacts are stored */
- BUILD_LOCATION = "build-ci-all"
-
- /* Directory where documententation is stored*/
- BUILD_DOC_LOCATION = "build-docs"
-
- /* Directory to store model test logs */
- TEST_LOGS="model_tests_logs"
-
- /* Pipeline specific directory references for internal use (stashing)*/
- FPV_WORK_DIR="FVP_MPS2"
- CMSIS_WORK_DIR="CMSIS_5"
- MBED_CRYPTO_WORK_DIR="mbed-crypto"
- CHECKPATCH_WORK_DIR="checkpatch"
-
- /* ARMLMD_LICENSE_FILE enviroment variable needs to be present
- for ARMCLANG compiler. Ideally that is set from Jenkins ENV.
- The product path depends on build-slave and should be set on
- pipepline */
- ARM_PRODUCT_PATH = '/home/buildslave/tools/sw/mappings'
- ARM_TOOL_VARIANT = 'ult'
- }
- stages {
- stage('ci scripts') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_def_stage, unit:'MINUTES')
- }
- steps {
- script {
- failed_stage = 'ci scripts'
- }
- checkout changelog: false, poll: false, scm: [
- $class: 'GitSCM',
- branches: [[name: '${GERRIT_SCRIPTS_REFSP}']],
- doGenerateSubmoduleConfigurations: false,
- extensions: [
- [
- $class: 'SubmoduleOption',
- disableSubmodules: false,
- parentCredentials: false,
- recursiveSubmodules: true,
- reference: '',
- trackingSubmodules: false
- ],
- [
- $class: 'CloneOption',
- shallow: true
- ],
- [
- $class:
- 'RelativeTargetDirectory',
- relativeTargetDir:
- "${TFM_CI_SCR_DIR}"
- ]
- ],
- submoduleCfg: [],
- userRemoteConfigs: [[
- url: ("${GERRIT_URL}/"
- + "${GERRIT_SCRIPTS}"),
- refspec: ("refs/changes/*:"
- + "refs/changes/*")
- ]]
- ]
-
- /* Clean-up evnironment before stashing */
- sh '''#!/bin/bash
- set -e
- # Capture the git information
- python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --collect --git-info ${TFM_CI_SCR_DIR} \\
- --output-file ${CI_SCRIPTS_INFO_FILE}
- '''.stripIndent()
-
- /* Stash the CI Scripts */
- stash includes: "${TFM_CI_SCR_DIR}/**/*", name: 'ci-scripts'
- stash includes: "${CI_SCRIPTS_INFO_FILE}",
- name: 'ci_scripts_git_info'
- /* Always finish by setting the stage as SUCCESS */
- script {
- checkout_ci_scripts_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: ci scripts"
- cleanWs()
- } /* always */
- } /* post */
- } /* stage */
- stage('Dependencies Checkout') {
- parallel {
- stage('Github (Nightlies)') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_def_stage, unit:'MINUTES')
- }
- steps {
- script {
- failed_stage = 'Github (Nightlies)'
- }
- /* Unstash the components from previous stages */
- unstash 'ci-scripts'
-
- /* Retrieve and extract artifacts from Nightly Job */
- copyArtifacts projectName: "${REF_DEP_CHECKOUT_NAME}"
- unzip zipFile: "${DEPENDENCIES_ARCHIVE_NAME_ZIP}",
- quiet: true
-
- sh '''#!/bin/bash
- set -e
- # Sanity check. Fail if data is untagged
- if [ ! -f ${DEPENDENCIES_CONTENTS_FILE} ]; then
- echo "Missing ${DEPENDENCIES_CONTENTS_FILE}, \\
- please check nightly checkout job."
- exit 1
- else
- echo "Parsing dependencies"
- CMSIS_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} | \\
- jq -r ".[\\"cmsis\\"]\\
- [\\"${DEPENDENCIES_CMSIS_TAG}\\"][\\"dir\\"]")
-
- MBEDCRYPTO_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} \\
- | jq -r ".[\\"mbedcrypto\\"]\\
- [\\"${DEPENDENCIES_MBED_CRYPTO_TAG}\\"][\\"dir\\"]")
- CHECKPATH_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} \\
- | jq -r ".[\\"checkpatch\\"]\\
- [\\"${DEPENDENCIES_CHECKPATCH_TAG}\\"][\\"dir\\"]")
-
- fi
-
- # Check that provided input tags exist
-
- # Create a labeled array for user selections
- USER_TAG_LIST=(CHECKPATCH:"${DEPENDENCIES_CHECKPATCH_TAG}" \\
- MBED_CRYPTO:"${DEPENDENCIES_MBED_CRYPTO_TAG}" \\
- CMSIS:"${DEPENDENCIES_CMSIS_TAG}")
-
- # Get extracted data from DEPENDENCIES_CONTENTS_FILE.
- USER_SEL_LIST="$CHECKPATH_DIR $MBEDCRYPTO_DIR"
- USER_SEL_LIST="$USER_SEL_LIST $CMSIS_DIR"
- IFS=' ' read -ra USR_SEL_LIST <<< "${USER_SEL_LIST}"
-
- TAG_COUNTER=0
- # Look for data not found in contents (jq out -> null)
- for USR_SEL in "${USR_SEL_LIST[@]}"; do
- echo "$USR_SEL > $TAG_COUNTER"
- if [ "$USR_SEL" == "null" ]; then
-
- echo "Dependencies ERROR. "\\
- "TAG: '${USER_TAG_LIST[TAG_COUNTER]}' "\\
- "does not exist in checkout job."
- exit 1
- fi
- ((TAG_COUNTER+=1))
- done
-
- # FIX for Jenkins unzip clearing permissions
- chmod +x $CHECKPATH_DIR/checkpatch.pl
-
- # Move the checked-out data to user-set directories
- mv $CMSIS_DIR $CMSIS_WORK_DIR
-
- # When the directory name from nightlies matches with
- # the expected work dir, non selected entries must be
- #removed
- mv $MBEDCRYPTO_DIR mbedcrypto_tmp
- rm -rf mbed-crypto
- mv $CHECKPATH_DIR checkpatch_tmp && rm -rf checkpatch
- mv checkpatch_tmp $CHECKPATCH_WORK_DIR
- mv mbedcrypto_tmp $MBED_CRYPTO_WORK_DIR
-
-
- CONTENTS="cmsis=${DEPENDENCIES_CMSIS_TAG} \\
- mbedcrypto=${DEPENDENCIES_MBED_CRYPTO_TAG} \\
- checkpatch=${DEPENDENCIES_CHECKPATCH_TAG}" \\
-
- if [ "${LAVA_TEST}" == "Enable" ]; then
- echo "Fetching LAVA Dependancies"
- # Prepare the FPGA image files
- # Remove whitespace between commas
- FPGA_IMAGES_TAG_LIST=$(echo ${FPGA_IMAGES_TAG_LIST} \\
- | sed 's/[[:space:]]*,[[:space:]]*/,/g')
- # Split the string using the comma.
- IFS=',' read -ra FPGA_IMAGES <<< \\
- "${FPGA_IMAGES_TAG_LIST}"
-
- # Download each commit tag
- for FPGA_NAME in "${FPGA_IMAGES[@]}"; do
- # Strip whitespace
- FPGA_NAME=$(echo $FPGA_NAME | xargs)
-
- FPGA_IMAGE_F=$(cat \\
- ${DEPENDENCIES_CONTENTS_FILE} | \\
- jq -r ".[\\"fpga\\"]\\
- [\\"${FPGA_NAME}\\"][\\"recovery\\"]")
-
- if [ "$FPGA_IMAGE_F" == "null" ]; then
- echo "Dependencies ERROR. "\\
- "FPGA TAG: '${FPGA_NAME}' does not "\\
- "exist in checkout job."
- exit 1
- else
- echo "Proccessing FPGA Image: ${FPGA_IMAGE_F}"
- cp ${FPGA_IMAGE_F} ./
- fi
- done
- CONTENTS="${CONTENTS} fpga=${FPGA_IMAGES_TAG_LIST}"
- fi
-
- if [ "${FASTMODEL_TEST}" == "Enable" ]; then
- echo "Fetching Fastmodel Dependancies"
- # Copy the selected Fastmodel directory
- FPV_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} | \\
- jq -r ".[\\"fastmodel\\"]\\
- [\\"${DEPENDENCIES_FPV_TAG}\\"][\\"dir\\"]")
-
- if [ "$FPV_DIR" == "null" ]; then
- echo "Dependencies ERROR. "\\
- "FASTMODEL TAG: '${DEPENDENCIES_FPV_TAG}'" \\
- "does not exist in checkout job."
- exit 1
- fi
- # FIX for Jenkins unzip clearing permissions
- chmod +x $FPV_DIR/FVP_MPS2_AEMv8M
-
- mv $FPV_DIR $FPV_WORK_DIR
-
- # Store the user selection to report
- CONTENTS="${CONTENTS} fastmodel=${DEPENDENCIES_FPV_TAG}"
- fi
-
- # Create a summary of the parameter provided versions
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --report ${DEPENDENCIES_CONTENTS_FILE} \\
- --filter-report ${CONTENTS} \\
- --output-file ${DEPENDENCIES_CONTENTS_FILE}
- '''.stripIndent()
-
- stash includes: "${CMSIS_WORK_DIR}/**/*",
- name: 'cmsis'
-
- stash includes:
- "${CHECKPATCH_WORK_DIR}/**/*",
- name: 'checkpatch'
-
- stash includes:
- "${MBED_CRYPTO_WORK_DIR}/**/*",
- name: 'mbedcrypto'
-
- stash includes:
- "${DEPENDENCIES_CONTENTS_FILE}",
- name: 'nightlies_info'
-
- script {
- if (params.FASTMODEL_TEST.equals('Enable')) {
- stash includes: "${FPV_WORK_DIR}/**/*",
- name: 'fvp_binaries'
- }
- if (params.LAVA_TEST.equals('Enable')) {
- archiveArtifacts artifacts:
- "*.tar.gz",
- onlyIfSuccessful: false
- }
- } /* script */
-
- /* Always finish by setting the stage as SUCCESS */
- script {
- checkout_nightlies_job_result = 'SUCCESS'
- failed_stage = ''
- }
-
- } /* steps */
- post {
- always {
- echo "POST: Github (Nightlies)"
- cleanWs()
- } /* always */
- } /* post */
- } /* stage */
-
- stage('tf-m') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_def_stage, unit:'MINUTES')
- }
- steps {
- script {
- failed_stage = 'Checkout Trusted Firmware-M'
- }
- /* Unstash the components from previous stages */
- unstash 'ci-scripts'
-
- checkout changelog: true, poll: false, scm: [
- $class: 'GitSCM',
- branches: [[name: '${GERRIT_REFSPEC}']],
- doGenerateSubmoduleConfigurations: false,
- extensions: [
- [
- $class: 'SubmoduleOption',
- disableSubmodules: false,
- parentCredentials: false,
- recursiveSubmodules: true,
- reference: '',
- trackingSubmodules: false
- ],
- [
- $class: 'RelativeTargetDirectory',
- relativeTargetDir: "${TFM_DIR}"
- ]
- ],
- submoduleCfg: [],
- userRemoteConfigs: [[
- url: ("${GERRIT_URL}/"
- + "${GERRIT_PROJECT}"),
- refspec: ("refs/changes/*:"
- + "refs/changes/*")
- ]]
- ]
- sh '''#!/bin/bash
- set -e
- # Capture the git information
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --collect --git-info ${TFM_DIR} \\
- --output-file ${GIT_INFO_FILE}
-
- cat ${GIT_INFO_FILE}
- '''.stripIndent()
-
- stash useDefaultExcludes: false, name: "${TFM_DIR}"
- stash includes: ("${GIT_INFO_FILE}"), name: 'git_info'
-
- /* Always finish by setting the stage as SUCCESS */
- script {
- checkout_tfm_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: Checkout Trusted Firmware-M"
- cleanWs()
- } /* always */
- } /* post */
-
- } /* stage */
-
- } /* parallel */
- } /* stage */
-
- stage('Static Code Analysis') {
- parallel {
- stage('Cppcheck') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_def_stage, unit:'MINUTES')
- }
- steps {
- /* Unstash the components from previous stages */
- unstash 'ci-scripts'
- unstash 'cmsis'
- unstash 'mbedcrypto'
- unstash 'tf-m'
-
- /* Run cppcheck */
- sh '''#!/bin/bash
- set -e
- export PATH=$EXTRA_PATHS:$PATH
-
- # Run cpp-check
- pushd ${TFM_DIR}
- if [ -n "${GERRIT_PATCHSET_REVISION}" ]; then
- # Run CppCheck againist files modified
- echo CppCheck againist Change-set
- ../${TFM_CI_SCR_DIR}/run-cppcheck.sh HEAD
- else
- # Run CppCheck againist the full code-base
- echo CppCheck againist base
- ../${TFM_CI_SCR_DIR}/run-cppcheck.sh
- fi
- popd
-
- # Parse the output into a report
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py\\
- --collect --cpp-check-xml \\
- ${TFM_DIR}/build-cppcheck/chk-config.xml \\
- ${TFM_DIR}/build-cppcheck/chk-src.xml \\
- --output-file ${CPPCHCK_SUMMARY_FILE}
-
- head -n -2 ${TFM_DIR}/build-cppcheck/chk-config.xml \\
- > cpp-join.xml
- tail -n +5 ${TFM_DIR}/build-cppcheck/chk-src.xml \\
- >> cpp-join.xml
-
- #Create HTML report
- cppcheck-htmlreport --source-encoding="iso8859-1" \\
- --title="TFM Cppcheck Report" --source-dir=./ \\
- --report-dir=./build-cppcheck-htmlreport/ \\
- --file=./cpp-join.xml
- '''.stripIndent()
-
- stash includes:
- ("${CPPCHCK_SUMMARY_FILE},"
- + "${TFM_DIR}/build-cppcheck/chk-config.xml,"
- + "${TFM_DIR}/build-cppcheck/chk-src.xml"),
- name: 'cppcheck_summary'
-
- /* Archive reports */
- zip dir: "build-cppcheck-htmlreport",
- zipFile: 'cppcheck-htmlreport.zip',
- archive: true
-
- archiveArtifacts artifacts:
- "${CPPCHCK_SUMMARY_FILE}",
- onlyIfSuccessful: false
-
- sh '''#!/bin/bash
- set -e
- # Do not exit pipeline if erros are detected. Generate
- # a trigger file which will notify user
- CPPCHECK_EVAL=$(python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --all-fields-must-pass \\
- --set-success-field 'success = True' \\
- --report ${CPPCHCK_SUMMARY_FILE} \\
- | tail -n 1)
-
- if [ "$CPPCHECK_EVAL" == "Evaluation failed" ]; then
- echo "Cppcheck analysis FAILED"
- touch cppcheck.eval_failed
- fi
- '''.stripIndent()
-
- /* Send email if */
- script {
- if (fileExists('cppcheck.eval_failed')) {
- if (env.GERRIT_PATCHSET_UPLOADER_EMAIL) {
- emailext (
- subject: ("Job '${env.JOB_NAME}'build "
- + "${env.BUILD_NUMBER} has "
- + "failed Cppcheck check."),
- body: ("Gerrit Change: "
- + "${env.GERRIT_CHANGE_URL}.\n"
- + "Please Check console output "
- + " at: ${env.BUILD_URL}\nor "
- + "summary report at: "
- + "${env.BUILD_URL}/artifact/"
- + "${CPPCHCK_SUMMARY_FILE}"),
- to: "${env.GERRIT_PATCHSET_UPLOADER_EMAIL}",
- )
- }
- }
- } /* script */
- /* Always finish by setting the stage as SUCCESS */
- script {
- cppcheck_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: Cppcheck"
- cleanWs()
- } /* always */
- failure {
- script {
- failed_stage = 'Cppcheck'
- }
- }
- } /* post */
- } /* stage */
-
- stage('check-patch') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_def_stage, unit:'MINUTES')
- }
- steps {
-
- /* Unstash the components from previous stages */
- unstash 'tf-m'
- unstash 'ci-scripts'
- unstash 'checkpatch'
-
- //Run check-patch
- sh '''#!/bin/bash
- set -e
- export PATH=$EXTRA_PATHS:$PATH
-
- # Run checkpatch
- pushd ${TFM_DIR}
-
- if [ -n "${GERRIT_PATCHSET_REVISION}" ]; then
- # Run Checkpatch againist files modified
- echo Checkpatch againist change-set
- ../${TFM_CI_SCR_DIR}/run-checkpatch.sh \\
- -l 1 -f ../${CKPATCH_DETAILS_FILE} \\
- -p ../${CHECKPATCH_WORK_DIR}
- else
- # Run Checkpatch againist the full code-base
- echo Checkpatch againist base
- ../${TFM_CI_SCR_DIR}/run-checkpatch.sh \\
- -l 0 -f ../${CKPATCH_DETAILS_FILE} \\
- -p ../${CHECKPATCH_WORK_DIR}
- fi
- popd
-
- # Create the report
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --collect \\
- --checkpatch-parse-f ${CKPATCH_DETAILS_FILE} \\
- --output-file ${CKPATCH_SUMMARY_FILE}
- '''.stripIndent()
-
- archiveArtifacts artifacts:
- "${CKPATCH_SUMMARY_FILE}",
- onlyIfSuccessful: false
-
- stash includes:
- "${CKPATCH_SUMMARY_FILE}",
- name: 'checkpatch_summary'
-
- sh '''#!/bin/bash
- set -e
- # Do not exit pipeline if erros are detected. Generate
- # a trigger file which will notify user
- CHECPATCH_EVAL=$(python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --all-fields-must-pass \\
- --set-success-field 'success = True' \\
- --report ${CKPATCH_SUMMARY_FILE} \\
- | tail -n 1)
-
- if [ "$CHECPATCH_EVAL" == "Evaluation failed" ]; then
- echo "Checkpatch analysis FAILED"
- touch checkpatch.eval_failed
- fi
- '''.stripIndent()
-
- /* Send email if */
- script {
- if (fileExists('checkpatch.eval_failed')) {
- if (env.GERRIT_PATCHSET_UPLOADER_EMAIL) {
- emailext (
- subject: ("Job '${env.JOB_NAME}'build "
- + "${env.BUILD_NUMBER} has "
- + "failed Checkpatch check."),
- body: ("Gerrit Change: "
- + "${env.GERRIT_CHANGE_URL}.\n"
- + "Please Check console output "
- + "at: ${env.BUILD_URL}\nor "
- + "summary report at: "
- + "${env.BUILD_URL}/artifact/"
- + "${CKPATCH_SUMMARY_FILE}"),
- to: "${env.GERRIT_PATCHSET_UPLOADER_EMAIL}",
- )
- }
- }
- } /* script */
-
- /* Always finish by setting the stage as SUCCESS */
- script {
- checkpath_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: check-patch"
- cleanWs()
- } /* always */
- failure {
- script {
- failed_stage = 'check-patch'
- }
- }
- } /* post */
- } /* stage */
- } /* parallel */
- } /* stage */
-
- stage('Build') {
- parallel {
- stage('Configurations') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_build_stage, unit:'MINUTES')
- }
- steps {
- script {
- failed_stage = 'Build-Configurations'
- }
- /* Unstash the components from previous stages */
- unstash 'mbedcrypto'
- unstash 'cmsis'
- unstash 'tf-m'
- unstash 'ci-scripts'
-
- //Run the build and generate a summary report file.
- sh '''#!/bin/bash
- set -e
- export PATH=$EXTRA_PATHS:$PATH
-
- # Use build info from config if avaibale.
- if [ -n "${BUILD_CONFIG_JSON}" ]; then
- echo "Overriding build config with user provided one"
- echo ${BUILD_CONFIG_JSON} > build_config.json
- B_CONFIG="-f build_config.json"
- else
- B_CONFIG="--config full --install"
- fi
-
- # Build the project combinations
- python3 ${TFM_CI_SCR_DIR}/build_helper/build_helper.py \\
- -b ${BUILD_LOCATION} -s -l \\
- -p ${BUILD_PARALLEL_NO} -n ${BUILD_THREAD_NO} \\
- -r ${BUILD_SUMMARY_FILE} ${B_CONFIG}
- '''.stripIndent()
-
- /* Archive artifacts before evaluating report. */
- archiveArtifacts artifacts: ("${BUILD_LOCATION}/" +
- "*_*_*_*_*/install/**/*"),
- excludes: '**/*.map',
- onlyIfSuccessful: true
-
- /* Stash binaries to be used by nightly test jobs. */
- stash includes: ("${BUILD_LOCATION}/*_*_*_*_*/" +
- "install/outputs/**/*.*"),
- name: 'build_binaries'
-
- /* Stash summary to be used by upstream project. */
- stash includes: "${BUILD_SUMMARY_FILE}",
- name: 'build_summary'
- archiveArtifacts artifacts: "${BUILD_SUMMARY_FILE}"
-
- sh '''#!/bin/bash
- set -e
- # Parse the output report(exit(1) if build has failed)
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --all-fields-must-pass --error_if_failed \\
- --report ${BUILD_SUMMARY_FILE}
- '''.stripIndent()
-
- script {
- build_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: Build"
- /* Save build logs */
- archiveArtifacts artifacts: "build-ci-all/*.log",
- onlyIfSuccessful: false
- cleanWs()
- } /* always */
- } /* post */
- } /* stage */
-
- stage('Documentation') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_build_stage, unit:'MINUTES')
- }
- when {
- allOf {
- environment name: 'BUILD_DOCS', value: 'Enable'
- }
- }
- steps {
- script {
- failed_stage = 'Build-Documentation'
- }
- /* Unstash the components from previous stages */
- unstash 'mbedcrypto'
- unstash 'cmsis'
- unstash 'tf-m'
- unstash 'ci-scripts'
-
- //Run the build and generate a summary report file.
- sh '''#!/bin/bash
- set -e
- export PATH=$EXTRA_PATHS:$PATH
-
- # Build doncumentation
- python3 \\
- ${TFM_CI_SCR_DIR}/build_helper/build_helper.py \\
- -b ${BUILD_DOC_LOCATION} -l --config doxygen
- '''.stripIndent()
-
- archiveArtifacts artifacts: ("${BUILD_DOC_LOCATION}/" +
- "*/install/doc/**/*"),
- onlyIfSuccessful: true
-
- script {
- build_docs_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: Build Documentation"
- cleanWs()
- } /* always */
- } /* post */
- } /* stage */
-
- } /* parallel */
- } /* stage */
-
- stage('Tests') {
- parallel {
- stage('Fastmodel') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: 60, unit:'MINUTES')
- }
- when {
- allOf {
- environment name: 'FASTMODEL_TEST', value: 'Enable'
- }
- }
- steps {
- /* Unstash the components from previous stages */
- unstash 'ci-scripts'
- unstash 'fvp_binaries'
- unstash 'build_binaries'
- unstash 'build_summary'
-
- script {
- failed_stage = 'Fast Model Testing'
- }
- sh '''#!/bin/bash
- set -e
- python3 -u \\
- ${TFM_CI_SCR_DIR}/fastmodel_dispatcher/fastmodel_dispatcher.py \\
- --build_report ${BUILD_SUMMARY_FILE} \\
- --report ${FPV_SUMMARY_FILE}
-
-
- # Filter out the test results
- mkdir -p ${TEST_LOGS}
- mv terminal*.log ${TEST_LOGS}/
- '''.stripIndent()
-
- stash includes: "${FPV_SUMMARY_FILE}",
- name: 'fastmodel_summary'
- archiveArtifacts artifacts: "${FPV_SUMMARY_FILE}"
- archiveArtifacts artifacts: "${TEST_LOGS}/*"
-
- sh '''#!/bin/bash
- set -e
- # Parse the report
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --all-fields-must-pass --error_if_failed \\
- --set-success-field 'success = True' \\
- --report ${FPV_SUMMARY_FILE}
- '''.stripIndent()
- script {
- fastmodel_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: Fastmodel Tests"
- cleanWs()
- } /* always */
- } /* post */
- } /* stage */
-
- stage('Lava') {
- agent {
- node {
- label docker_img_builder
- }
- }
- environment {
- /* Authentication token coming from host */
- LAVA_CREDENTIAL = credentials("${LAVA_CREDENTIAL_ID}")
-
- /* Helper function separates USR/PSW credentials. Asign
- them to a more commonly used notation */
- LAVA_USER="${LAVA_CREDENTIAL_USR}"
- LAVA_TOKEN="${LAVA_CREDENTIAL_PSW}"
-
- }
- options {
- timeout(time: timeout_test_stage, unit:'MINUTES')
- }
- when {
- allOf {
- environment name: 'LAVA_TEST', value: 'Enable'
- }
- }
- steps {
- script {
- failed_stage = 'Lava test'
- }
- unstash 'ci-scripts'
-
- sh '''#!/bin/bash
- set -e
- # If no conifig is provided use the built'in
- if [ -n "${LAVA_TEST_CONFIG_JSON}" ]; then
- echo "Overriding test config."
- echo ${LAVA_TEST_CONFIG_JSON} > test_config.json
- T_CONFIG="--config-file test_config.json"
- fi
-
- # Generate the lava definition file
- python3 -u \\
- ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\
- --task-create-definition \\
- --create-definition-build-no ${BUILD_NUMBER} \\
- --override-jenkins-url ${JENKINS_URL} \\
- --override-jenkins-job ${JOB_NAME} \\
- --create-definition-output-file \\
- "${LAVA_TEST_DEFINITION}" \\
- ${T_CONFIG}
-
- # Do not submit invalid jobs
- if [ ! -f "${LAVA_TEST_DEFINITION}" ]; then
- echo "LAVA job file does not exist: ${JOB}"
- exit -1
- fi
-
- # Convert timeout to seconds
- DTOUT=$((${LAVA_DISPATCHER_TIMEOUT_MINS} * 60))
-
- # Submit the job to LAVA
- python3 -u \\
- ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\
- --task-dispatch "$LAVA_TEST_DEFINITION" \\
- --dispatch-timeout ${DTOUT} \\
- --dispatch-lava-url ${LAVA_URL} \\
- --dispatch-lava-job-results-file \\
- ${LAVA_JOB_RESULTS} \\
- --dispatch-lava_token_from_environ
-
- # If tokens are not provided withCredentials bidnings
- # they can be manually set by:
- # --dispatch-lava_token_usr
- # --dispatch-lava_token_secret
-
- # Verify the results and store them into a summary
- python3 \\
- ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\
- --task-lava-parse ${LAVA_JOB_RESULTS} \\
- --output-report "${LAVA_SUMMARY_FILE}"
- '''.stripIndent()
-
- archiveArtifacts artifacts:
- "${LAVA_JOB_RESULTS}," +
- "${LAVA_TEST_DEFINITION}," +
- "${LAVA_SUMMARY_FILE}",
- onlyIfSuccessful: false
-
- stash includes:
- ("${LAVA_SUMMARY_FILE},"
- + "${LAVA_JOB_RESULTS},"
- + "${LAVA_TEST_DEFINITION}"),
- name: 'lava_summary'
-
- sh '''#!/bin/bash
- set -e
- # Parse the output report(exit(1) if build has failed)
- python3 \\
- ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --all-fields-must-pass --error_if_failed \\
- --set-success-field 'success = True' \\
- --report ${LAVA_SUMMARY_FILE}
- '''.stripIndent()
-
- script {
- lava_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- script {
- if (currentBuild.result == 'FAILURE') {
- sh '''#!/bin/bash
- # If no job has been submittied the file
- # will not exist
- if [ ! -f lava_job.id ]; then
- echo "No LAVA job pending"
- exit 0
- fi
-
- # Send the request to cancell the job
- python3 -u \\
- ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\
- --dispatch-lava-url ${LAVA_URL} \\
- --dispatch-cancel $(cat lava_job.id) \\
- --dispatch-lava_token_from_environ
- '''.stripIndent()
-
- } else {
- echo "POST: Lava test passed"
- }
-
- cleanWs()
- } /* script */
- } /* always */
- } /* post */
- } /* stage */
- } /* parallel */
- } /* stage */
-
- stage('Proccess Artifacts & Report') {
- agent {
- node {
- label docker_img_builder
- }
- }
- options {
- timeout(time: timeout_def_stage, unit:'MINUTES')
- }
- steps {
- unstash 'ci_scripts_git_info'
- unstash 'nightlies_info'
- unstash 'git_info'
- unstash 'build_summary'
- unstash 'checkpatch_summary'
- unstash 'cppcheck_summary'
-
- script {
- failed_stage = 'Proccess Artifacts & Report'
-
- if (params.FASTMODEL_TEST.equals('Enable')) {
- unstash 'fastmodel_summary'
- }
- if (params.LAVA_TEST.equals('Enable')) {
- unstash 'lava_summary'
- }
- } /* script */
- //Create build result summary to be posted to gerrit.
- //Basically contactenate build summary got from downstream jobs
- //with a header inserted in between.
-
- /* Unstash the components from previous stages */
- unstash 'ci-scripts'
-
- sh '''#!/bin/bash
- set -e
- # Collect enviroment information
- python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --collect --jenkins-info \\
- --output-file ${JENKINS_ENV_INFO_FILE}
-
- # Merge all the reports into a summary
- REPORT_LIST="cppcheck=${CPPCHCK_SUMMARY_FILE} \\
- checkpatch=${CKPATCH_SUMMARY_FILE} \\
- build=${BUILD_SUMMARY_FILE} \\
- tf-m=${GIT_INFO_FILE} \\
- ci-scripts=${CI_SCRIPTS_INFO_FILE} \\
- dependencies-info=${DEPENDENCIES_CONTENTS_FILE} \\
- jenkings-info=${JENKINS_ENV_INFO_FILE}"
-
- if [ -f "${LAVA_SUMMARY_FILE}" ]; then
- REPORT_LIST="${REPORT_LIST} lava=${LAVA_SUMMARY_FILE}"
- fi
-
- if [ -f "${FPV_SUMMARY_FILE}" ]; then
- REPORT_LIST="${REPORT_LIST} fastmodel=${FPV_SUMMARY_FILE}"
- fi
-
- python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --collate-report $REPORT_LIST \\
- --output-file ${SUMMARY_FILE}
- '''.stripIndent()
-
- /* Archive summary to make it avaibale later. */
- archiveArtifacts artifacts: "${SUMMARY_FILE}"
-
- script {
- def test_pass = false
-
- if (cppcheck_job_result == "SUCCESS" && \
- checkpath_job_result == "SUCCESS" && \
- build_job_result == "SUCCESS") {
-
- /* If a test is enabled but failed, then build
- has failed */
- if (params.FASTMODEL_TEST.equals('Enable') && \
- fastmodel_job_result == "FAILURE") {
- echo "model testing failed"
- currentBuild.result = "FAILURE"
- }
- else if (params.LAVA_TEST.equals('Enable') && \
- lava_job_result == "FAILURE") {
- echo "LAVA testing failed"
- currentBuild.result = "FAILURE"
- }
- else {
- echo "All jobs have been successfull"
- }
- } else {
- echo "One or more jobs have failed"
- echo "Cppcheck status:"
- echo cppcheck_job_result
- echo "Checkpatch status:"
- echo checkpath_job_result
- echo "Build status:"
- echo build_job_result
- echo "Model Test status:"
- echo fastmodel_job_result
- echo "LAVA Test status:"
- echo lava_job_result
- currentBuild.result = "FAILURE"
- }
-
- artifact_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- post {
- always {
- echo "POST: Proccess Artifacts & Report"
- cleanWs()
- } /* always */
- } /* post */
- } /* stage */
- } /* stages */
- post {
- always {
- cleanWs()
- } /* always */
- failure {
- script {
- if (!(params.MAINTAINER_EMAIL.equals(''))) {
- emailext (
- subject: ("Job '${env.JOB_NAME}'build "
- + "${env.BUILD_NUMBER}, failed at stage: "
- + "'${failed_stage}'"),
- body: "Check console output at ${env.BUILD_URL}",
- to: '${MAINTAINER_EMAIL}',
- recipientProviders: [
- [$class: ('CulpritsRecipient'
- + 'Provider')],
- [$class: ('RequesterRecipient'
- + 'Provider')]
- ]
- )
- }
- } /* script */
- } /* failure */
- aborted {
- script {
- if (!(params.MAINTAINER_EMAIL.equals(''))) {
- emailext (
- subject: ("Job '${env.JOB_NAME}'build "
- + "${env.BUILD_NUMBER}, cancelled at stage: "
- + "'${failed_stage}'"),
- body: "Check console output at ${env.BUILD_URL}",
- to: '${MAINTAINER_EMAIL}',
- recipientProviders: [
- [$class: ('CulpritsRecipient'
- + 'Provider')],
- [$class: ('RequesterRecipient'
- + 'Provider')]
- ]
- )
- }
- } /* script */
- } /* failure */
- } /* post */
-} /* pipeline */
diff --git a/pipelines/tf-m-dependencies-checkout b/pipelines/tf-m-dependencies-checkout
deleted file mode 100644
index 3cab2ae..0000000
--- a/pipelines/tf-m-dependencies-checkout
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env groovy
-
-/**
-* Copyright (c) 2018-2019 ARM Limited
-* SPDX-License-Identifier: BSD-3-Clause
-*
-* The following pipeline checks out code from Github on a daily basis
-* and archive it, reducing the bandwidth the tfm jobs are using.
-*/
-
-pipeline {
- agent {
- node {
- label 'docker-amd64-bionic'
- }
- }
- environment {
- /* Trusted Firmware CI Script checkout directory */
- TFM_CI_SCR_DIR = "tfm-ci-scripts"
-
- /* Name for virtual eviroment */
- VENV_P3_NAME = "tfm-openci-python3-venv"
- }
- options {
- timeout(time: 45, unit:'MINUTES')
- }
- stages {
- stage('ci scripts') {
- steps {
- checkout changelog: false, poll: false, scm: [
- $class: 'GitSCM',
- branches: [[name: '${GERRIT_SCRIPTS_REFSP}']],
- doGenerateSubmoduleConfigurations: false,
- extensions: [
- [
- $class: 'SubmoduleOption',
- disableSubmodules: false,
- parentCredentials: false,
- recursiveSubmodules: true,
- reference: '',
- trackingSubmodules: false
- ],
- [
- $class: 'CloneOption',
- shallow: true
- ],
- [
- $class:
- 'RelativeTargetDirectory',
- relativeTargetDir:
- "${TFM_CI_SCR_DIR}"
- ]
- ],
- submoduleCfg: [],
- userRemoteConfigs: [[
- url: ("${GERRIT_URL}/"
- + "${GERRIT_SCRIPTS}"),
- refspec: ("refs/changes/*:"
- + "refs/changes/*")
- ]]
- ]
-
- /* Clean-up evnironment before stashing */
- stash name: 'ci-scripts'
-
- /* Always finish by setting the stage as SUCCESS */
- script {
- checkout_ci_scripts_job_result = 'SUCCESS'
- failed_stage = ''
- }
- } /* steps */
- } /* stage */
- stage('Dependencies Checkout') {
- parallel {
- stage('mbed-crypto') {
- /* Arhive the workspace for */
- steps {
- script {
- for( def String mbed_crypto_tag :
- "${MBED_CRYPTO_TAG_LIST}".split(",") ) {
- checkout changelog: false, poll: false, scm: [
- $class: 'GitSCM',
- branches: [[name: mbed_crypto_tag.trim()]],
- doGenerateSubmoduleConfigurations: false,
- extensions: [
- [
- $class: 'SubmoduleOption',
- disableSubmodules: false,
- parentCredentials: false,
- recursiveSubmodules: true,
- reference: '',
- trackingSubmodules: false
- ],
- [
- $class: 'CloneOption',
- shallow: false
- ],
- [
- $class: 'RelativeTargetDirectory',
- relativeTargetDir:
- "${MBED_CRYPTO_ROOT_DIR}/" +
- mbed_crypto_tag.trim()
- ]
- ],
- submoduleCfg: [],
- userRemoteConfigs: [[url: '${MBED_CRYPTO_URL}']]
- ]
-
- } /* for */
- }/* script */
- } /* steps */
- } /* stage */
- stage('CMSIS') {
- steps {
- script {
- for( def String cmsis_tag :
- "${CMSIS_5_TAG_LIST}".split(",") ) {
- print(cmsis_tag)
- checkout changelog: false, poll: false, scm: [
- $class: 'GitSCM',
- branches: [[name: cmsis_tag.trim()]],
- doGenerateSubmoduleConfigurations: false,
- extensions: [
- [ $class: 'GitLFSPull'
- ],
- [
- $class: 'SubmoduleOption',
- disableSubmodules: false,
- parentCredentials: false,
- recursiveSubmodules: true,
- reference: '',
- trackingSubmodules: false
- ],
- [
- $class: 'CloneOption',
- shallow: false
- ],
- [
- $class:
- 'RelativeTargetDirectory',
- relativeTargetDir:
- "${CMSIS_ROOT_DIR}/" +
- cmsis_tag.trim()
- ],
- [
- $class: 'SparseCheckoutPaths',
- sparseCheckoutPaths:[
- [$class:'SparseCheckoutPath',
- path:"${CMSIS_5_SPARSE_DIR}/"]
- ]
- ]
- ],
- submoduleCfg: [],
- userRemoteConfigs: [[url: '${CMSIS_5_URL}']]
- ]
- } /* for */
- }/* script */
- } /* steps */
- } /* stage */
- stage('checkpatch') {
- steps {
- sh '''#!/bin/bash
- set -e
-
- # The commit url can be produced from the plain string
- COMMIT_URL=$(echo $CHECKPATCH_URL | \\
- sed 's:plain:commit:'g)
-
- # Split the string using the comma.
- IFS=',' read -ra CP_COMMITS <<< "${CHECKPATCH_COMMITS}"
-
- # Download each commit tag
- for CP_COMMIT in "${CP_COMMITS[@]}"; do
- # Strip whitespace
- CP_COMMIT=$(echo $CP_COMMIT | xargs)
-
- # Create a directory for each commit version
- # NOTE that the HEAD tag is not changed for the
- # directory name
-
- CHECKPATCH_PATH="${CHECKPATCH_ROOT_DIR}/$CP_COMMIT"
- mkdir -p ${CHECKPATCH_PATH}
-
- # Resolve head to the commit number
- if [[ $CP_COMMIT == "HEAD" ]]; then
- CP_COMMIT=$(curl "$COMMIT_URL?id=HEAD" | \\
- grep "<th>commit</th>" | \\
- grep -P -o '[A-Fa-f0-9]{40}' | head -n 1)
- fi
-
- echo "Checking out checkpatch commit $CP_COMMIT"
-
- # Capture version information
- echo "$COMMIT_URL?id=$CP_COMMIT" > \\
- ${CHECKPATCH_PATH}/version.info
-
- # Create filelist and download those items.
- declare -a cp_arr=("checkpatch.pl" \\
- "spelling.txt" "const_structs.checkpatch")
-
- for i in "${cp_arr[@]}"
- do
- echo "${CHECKPATCH_URL}/${i}?id=$CP_COMMIT"
- curl "${CHECKPATCH_URL}/${i}?id=$CP_COMMIT" \\
- --output "./${CHECKPATCH_PATH}/${i}"
- chmod 640 ./${CHECKPATCH_PATH}/${i}
- done
- chmod 750 ./${CHECKPATCH_PATH}/checkpatch.pl
- chmod +x ./${CHECKPATCH_PATH}/checkpatch.pl
- done
- '''.stripIndent()
- } /* steps */
- } /* stage */
- stage('FastModel') {
- when {
- expression {
- return env.FASTMODEL_VERSIONS != "";
- }
- }
- steps {
- sh '''#!/bin/bash
- set -e
- # FASTMODEL_VER_ARRAY=(${FASTMODEL_VERSIONS})
- IFS=',' read -ra FASTMODEL_VER_ARRAY \\
- <<< "${FASTMODEL_VERSIONS}"
-
- # FASTMODEL_FILE_ARRAY=(${FASTMODEL_FILES})
- IFS=',' read -ra FASTMODEL_FILE_ARRAY \\
- <<< "${FASTMODEL_FILES}"
- # FASTMODEL_EXEC_ARRAY=(${FASTMODEL_EXEC_FILES})
-
- IFS=',' read -ra FASTMODEL_EXEC_ARRAY \\
- <<< "${FASTMODEL_EXEC_FILES}"
-
- for FMV in "${FASTMODEL_VER_ARRAY[@]}"; do
- # Strip whitespace
- FMV=$(echo $FMV | xargs)
- mkdir -p $FASTMODEL_ROOT_DIR/$FMV
- for FMF in "${FASTMODEL_FILE_ARRAY[@]}"; do
- # Strip whitespace
- FMF=$(echo $FMF | xargs)
- REMOTE_F=${FASTMODEL_STORE_URL}/$FMV/$FMF
- LOCAL_F=$FASTMODEL_ROOT_DIR/$FMV/$FMF
- curl ${REMOTE_F} --output ${LOCAL_F}
- echo "Downloading ${REMOTE_F} ---> $LOCAL_F"
- if [[ " ${FASTMODEL_EXEC_ARRAY[*]} " == \\
- *"${FMF}"* ]]; then
- echo "Setting executable bit for ${LOCAL_F}"
- chmod +x ${LOCAL_F}
- fi
-
- done
- done
- '''.stripIndent()
- } /* steps */
- } /* stage */
-
- stage('FPGA Images') {
- /* Arhive the workspace for */
- when {
- expression {
- return env.FPGA_IMAGES_TAG_LIST != "";
- }
- }
- steps {
- sh '''#!/bin/bash
- set -e
- # Split the string using the comma.
- IFS=',' read -ra FPGA_IMAGES <<< "${FPGA_IMAGES_TAG_LIST}"
- mkdir ${FPGA_ROOT_DIR}
- # Download each commit tag
- for FPGA_IMAGE in "${FPGA_IMAGES[@]}"; do
- # Strip whitespace
- FPGA_IMAGE=$(echo $FPGA_IMAGE | xargs)
-
- curl ${HTTP_IMAGE_STORE}/${FPGA_IMAGE} \\
- --output ${FPGA_ROOT_DIR}/${FPGA_IMAGE}
- done
- ls ${FPGA_ROOT_DIR}
- '''.stripIndent()
- } /* steps */
- } /* stage */
- } /* Parallel */
- } /* stage */
- stage('Archive') {
- /* Arhive the workspace for */
- steps {
- sh '''#!/bin/bash
- set -e
- CONTENTS="mbedcrypto=./${MBED_CRYPTO_ROOT_DIR} \\
- checkpatch=./${CHECKPATCH_ROOT_DIR} \\
- cmsis=./${CMSIS_ROOT_DIR}"
-
- if [ -n "${FASTMODEL_VERSIONS}" ]; then
- CONTENTS="${CONTENTS} fastmodel=./${FASTMODEL_ROOT_DIR}"
- fi
-
- if [ -n "${FPGA_IMAGES_TAG_LIST}" ]; then
- CONTENTS="${CONTENTS} fpga=./${FPGA_ROOT_DIR}"
- fi
-
-
- # Build the project combinations
- python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\
- --collect \\
- --output-file ${DEPENDENCIES_CONTENTS_FILE} \\
- --dependencies-checkout \\
- --content-paths ${CONTENTS}
-
- rm -rf ${TFM_CI_SCR_DIR}
- '''
- archiveArtifacts artifacts:
- "${DEPENDENCIES_CONTENTS_FILE}",
- onlyIfSuccessful: false
-
- zip zipFile: "${DEPENDENCIES_ARCHIVE_NAME_ZIP}", archive: true
- } /* steps */
- } /* stage */
- }
- post {
- always {
- cleanWs()
- } /* always */
- failure {
- script {
- echo "Job failed"
- /* Only set email if set in params */
- if (!(params.MAINTAINER_EMAIL.equals(''))) {
- emailext (
- subject: "Job '${env.JOB_NAME}' " +
- "failed at build '${env.BUILD_NUMBER}'",
- body: "Check console output at ${env.BUILD_URL}",
- to: '${MAINTAINER_EMAIL}'
- )
- }
- } /* script */
- } /* failure */
- } /* post */
-} /* pipeline */
diff --git a/tf-m-build-test-nightly.yaml b/tf-m-build-test-nightly.yaml
deleted file mode 100644
index cd07b13..0000000
--- a/tf-m-build-test-nightly.yaml
+++ /dev/null
@@ -1,149 +0,0 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-
-- job:
- name: tf-m-build-test-nightly
- description: |
- This job is triggered at midnight, builds all configurations and tests
- them on MPS2 board.
- project-type: pipeline
- disabled: false
- sandbox: true
- concurrent: false
- properties:
- - build-discarder:
- days-to-keep: 30
- num-to-keep: 10
- - authorization:
- suihkulokki:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- minosgalanakis:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- bhcopeland:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- triggers:
- - timed: "@midnight"
- parameters:
- - string:
- name: 'GERRIT_URL'
- description: 'Base address of gerrit host'
- default: 'https://review.trustedfirmware.org'
- - string:
- name: 'GERRIT_REFSPEC'
- description: >
- Git branch or "refspec" (e.g. rc3,
- refs/changes/79/91279/2) to work on.
- default: 'master'
- - string:
- name: 'GERRIT_PROJECT'
- description: >
- Repository on gerrit server to test
- against (e.g. iot-sw/tf-m/internal/tf-m
- default: 'trusted-firmware-m'
- - string:
- name: 'REF_DEP_CHECKOUT_NAME'
- description: >
- Reference to the job to check out dependencies artifacts from
- default: 'tf-m-dependencies-checkout'
- - string:
- name: 'DEPENDENCIES_ARCHIVE_NAME_ZIP'
- description: 'Archive name of dependencies artifacts'
- default: 'tfm_dependencies.zip'
- - string:
- name: 'GERRIT_SCRIPTS'
- description: 'Helper scripts used in conjuction with Jenkins'
- default: 'ci/tf-m-ci-scripts'
- - string:
- name: 'GERRIT_SCRIPTS_REFSP'
- description: 'Branch or refspec to checkout'
- default: 'master'
- - text:
- name: 'BUILD_PARALLEL_NO'
- description: "Number of parallel configuration builds."
- default: '12'
- - text:
- name: 'BUILD_THREAD_NO'
- description: "Number of build threads per configuration build."
- default: '6'
- - text:
- name: 'BUILD_CONFIG_JSON'
- description: "Build configuration json formatted."
- - text:
- name: 'LAVA_TEST_CONFIG_JSON'
- description: "Override LAVA Test configuration (json format)."
- - string:
- name: 'SUMMARY_FILE'
- description: "Job output aggregrated summary file(Plain text)."
- default: 'summary.json'
- - string:
- name: 'DEPENDENCIES_CONTENTS_FILE'
- description: 'Artifact name for contents of archive.'
- default: 'dependencies_checkout_contents.json'
- - string:
- name: 'DEPENDENCIES_MBED_CRYPTO_TAG'
- description: 'Selected version of mbed-crypto (from contents)'
- default: '3.0.1'
- - string:
- name: 'DEPENDENCIES_CMSIS_TAG'
- description: 'Selected version of cmsis (from contents)'
- default: '5.5.0'
- - string:
- name: 'DEPENDENCIES_FPV_TAG'
- description: 'Selected version of fastmodel (from contents)'
- default: ''
- - string:
- name: 'DEPENDENCIES_CHECKPATCH_TAG'
- description: 'Selected version of checkpatch (from contents)'
- default: '852d095'
- - string:
- name: 'FPGA_IMAGES_TAG_LIST'
- description: >
- FPGA image names (comma separated list) to be exposed to lava
- default: 'mps2_an521_v3.0, mps2_an519_v3.0'
- - string:
- name: 'LAVA_URL'
- default: 'https://tf.validation.linaro.org'
- description: 'URL of the LAVA instance.'
- - string:
- name: 'LAVA_CREDENTIAL_ID'
- default: 'LAVA_TOKEN'
- description: |
- 'ID of the Jenkins credential containing a LAVA auth token.'
- - string:
- name: 'MAINTAINER_EMAIL'
- description: 'Person to be notified of failure ( empty = disable )'
- default: ''
- - choice:
- name: 'FASTMODEL_TEST'
- choices:
- - Disable
- - Enable
- description: "Set to enable Fastmodel testing of artefacts"
- - choice:
- name: 'LAVA_TEST'
- choices:
- - Disable
- - Enable
- description: "Set to enable LAVA testing of artefacts"
- - choice:
- name: 'BUILD_DOCS'
- choices:
- - Enable
- - Disable
- description: "Set to enable Documentation building"
- dsl:
- !include-raw:
- - ./pipelines/tf-m-build-test-review
diff --git a/tf-m-build-test-review.yaml b/tf-m-build-test-review.yaml
deleted file mode 100644
index fce35c7..0000000
--- a/tf-m-build-test-review.yaml
+++ /dev/null
@@ -1,167 +0,0 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2018-2020, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-
-- job:
- name: tf-m-build-test-review
- description: 'This job is triggered by gerrit for TF-M code reviews.'
- project-type: pipeline
- disabled: true
- sandbox: true
- concurrent: true
- properties:
- - build-discarder:
- days-to-keep: 15
- num-to-keep: 45
- - authorization:
- suihkulokki:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- minosgalanakis:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- bhcopeland:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- triggers:
- - gerrit:
- trigger-on:
- - comment-added-event:
- approval-category: 'Allow-CI'
- approval-value: '1'
- dynamic-trigger-enabled: false
- projects:
- - project-compare-type: 'PLAIN'
- project-pattern: 'TF-M/trusted-firmware-m'
- branches:
- - branch-compare-type: 'REG_EXP'
- branch-pattern: 'master'
- disable-strict-forbidden-file-verification: false
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
- silent: false
- silent-start: false
- notification-level: 'OWNER_REVIEWERS'
- parameters:
- - string:
- name: 'GERRIT_URL'
- description: 'Base address of gerrit host'
- default: 'https://review.trustedfirmware.org'
- - string:
- name: 'GERRIT_REFSPEC'
- description: >
- Git branch or "refspec" (e.g. rc3,
- refs/changes/79/91279/2) to work on.
- default: 'master'
- - string:
- name: 'GERRIT_PROJECT'
- description: >
- Repository on gerrit server to test
- against (e.g. iot-sw/tf-m/internal/tf-m
- default: 'trusted-firmware-m'
- - string:
- name: 'REF_DEP_CHECKOUT_NAME'
- description: >
- Reference to the job to check out dependencies artifacts from
- default: 'tf-m-dependencies-checkout'
- - string:
- name: 'DEPENDENCIES_ARCHIVE_NAME_ZIP'
- description: 'Archive name of dependencies artifacts'
- default: 'tfm_dependencies.zip'
- - string:
- name: 'GERRIT_SCRIPTS'
- description: 'Helper scripts used in conjuction with Jenkins'
- default: 'ci/tf-m-ci-scripts'
- - string:
- name: 'GERRIT_SCRIPTS_REFSP'
- description: 'Branch or refspec to checkout'
- default: 'master'
- - text:
- name: 'BUILD_PARALLEL_NO'
- description: "Number of parallel configuration builds."
- default: '12'
- - text:
- name: 'BUILD_THREAD_NO'
- description: "Number of build threads per configuration build."
- default: '6'
- - text:
- name: 'BUILD_CONFIG_JSON'
- description: "Build configuration json formatted."
- - text:
- name: 'LAVA_TEST_CONFIG_JSON'
- description: "Override LAVA Test configuration (json format)."
- - string:
- name: 'SUMMARY_FILE'
- description: "Job output aggregrated summary file(Plain text)."
- default: 'summary.json'
- - string:
- name: 'DEPENDENCIES_CONTENTS_FILE'
- description: 'Artifact name for contents of archive.'
- default: 'dependencies_checkout_contents.json'
- - string:
- name: 'DEPENDENCIES_MBED_CRYPTO_TAG'
- description: 'Selected version of mbed-crypto (from contents)'
- default: '3.0.1'
- - string:
- name: 'DEPENDENCIES_CMSIS_TAG'
- description: 'Selected version of cmsis (from contents)'
- default: '5.5.0'
- - string:
- name: 'DEPENDENCIES_FPV_TAG'
- description: 'Selected version of fastmodel (from contents)'
- default: ''
- - string:
- name: 'DEPENDENCIES_CHECKPATCH_TAG'
- description: 'Selected version of checkpatch (from contents)'
- default: '852d095'
- - string:
- name: 'FPGA_IMAGES_TAG_LIST'
- description: >
- FPGA image names (comma separated list) to be exposed to lava
- default: 'mps2_an521_v3.0, mps2_an519_v3.0'
- - string:
- name: 'LAVA_URL'
- default: 'https://tf.validation.linaro.org'
- description: 'URL of the LAVA instance.'
- - string:
- name: 'LAVA_CREDENTIAL_ID'
- default: 'LAVA_TOKEN'
- description: |
- 'ID of the Jenkins credential containing a LAVA auth token.'
- - string:
- name: 'MAINTAINER_EMAIL'
- description: 'Person to be notified of failure ( empty = disable )'
- default: ''
- - choice:
- name: 'FASTMODEL_TEST'
- choices:
- - Disable
- - Enable
- description: "Set to enable Fastmodel testing of artefacts"
- - choice:
- name: 'LAVA_TEST'
- choices:
- - Disable
- - Enable
- description: "Set to enable LAVA testing of artefacts"
- - choice:
- name: 'BUILD_DOCS'
- choices:
- - Disable
- - Enable
- description: "Set to enable Documentation building"
- dsl:
- !include-raw:
- - ./pipelines/tf-m-build-test-review
diff --git a/tf-m-dependencies-checkout.yaml b/tf-m-dependencies-checkout.yaml
deleted file mode 100644
index 89daab5..0000000
--- a/tf-m-dependencies-checkout.yaml
+++ /dev/null
@@ -1,151 +0,0 @@
-#-------------------------------------------------------------------------------
-# Copyright (c) 2018-2020, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-#-------------------------------------------------------------------------------
-
-- job:
- name: tf-m-dependencies-checkout
- description: >
- This job is manually triggered and collects external code/artifacts
- project-type: pipeline
- disabled: false
- sandbox: true
- concurrent: false
- triggers:
- properties:
- - build-discarder:
- days-to-keep: 30
- num-to-keep: 5
- - authorization:
- suihkulokki:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- minosgalanakis:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- bhcopeland:
- - job-read
- - job-extended-read
- - job-build
- - job-cancel
- parameters:
- - string:
- name: 'GERRIT_URL'
- description: 'Base address of gerrit host'
- default: 'https://review.trustedfirmware.org'
- - string:
- name: 'GERRIT_SCRIPTS'
- description: 'Helper scripts used in conjuction with Jenkins'
- default: 'ci/tf-m-ci-scripts'
- - string:
- name: 'GERRIT_SCRIPTS_REFSP'
- description: 'Branch or refspec to checkout'
- default: 'master'
- - string:
- name: 'MBED_CRYPTO_URL'
- description: 'Mbedtls repository url'
- default: 'https://git.trustedfirmware.org/mirror/mbed-crypto.git'
- - string:
- name: 'CHECKPATCH_URL'
- description: 'Linux kernel repository and path for checkpatch'
- default: "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/\
- linux.git/plain/scripts"
- - string:
- name: 'CMSIS_5_URL'
- description: 'CMSIS5 Repository url'
- default: 'https://git.trustedfirmware.org/mirror/ARM-software/CMSIS_5.git'
- - string:
- name: 'HTTP_IMAGE_STORE'
- description: >
- Server address and port of server hosting the fpga images
- default: 'http://127.0.0.1:8831'
- - string:
- name: 'MBED_CRYPTO_TAG_LIST'
- description: >
- Mbedtls tags/branches to checkout (comma separated list)
- default: 'mbedcrypto-1.1.0,mbedcrypto-3.0.1'
- - string:
- name: 'CMSIS_5_TAG_LIST'
- description: 'CMSIS5 tag/branch to checkout (comma separated list)'
- default: '5.2.0, 5.5.0'
- - string:
- name: 'FPGA_IMAGES_TAG_LIST'
- description: >
- FPGA image names (comma separated list). Clearing this field
- disables fpga checkout stage.
- default: 'mps2_an521_v3.0.tar.gz, mps2_an519_v3.0.tar.gz'
- - string:
- name: 'FASTMODEL_STORE_URL'
- description: >
- Server address and port of server offering the fastmodels.
- They should be served under a FASTMODEL_ROOT_DIR/FASTMODE_VER
- directory format
- default: "http://127.0.0.1:8831/"
- - string:
- name: 'FASTMODEL_VERSIONS'
- default: ''
- description: >
- Fastmodel versions (comma separated list).Clearing this field
- disables fastmodel checkout stage
- - string:
- name: 'FASTMODEL_FILES'
- default: >
- FVP_MPS2_AEMv8M,libarmctmodel.so, libIrisCore.so,
- libMAXCOREInitSimulationEngine.3.so, version.info
- description: 'List of files to download for each model version'
- - string:
- name: 'FASTMODEL_EXEC_FILES'
- default: >
- FVP_MPS2_AEMv8M
- description: >
- List of files will to be set as executables (comma separated list)
- - string:
- name: 'CHECKPATCH_COMMITS'
- description: 'Comma separated list of commits to check out'
- default: "852d095"
- - string:
- name: 'CMSIS_5_SPARSE_DIR'
- description: >
- CMSIS: Only checkout this directory (reducing codespace)
- default: 'CMSIS/RTOS2'
- - string:
- name: 'MBED_CRYPTO_ROOT_DIR'
- description: 'Directory in archive with all mbed-crypto variants'
- default: 'mbed-crypto'
- - string:
- name: 'CMSIS_ROOT_DIR'
- description: 'Directory in archive with all cmsis variants'
- default: 'cmsis'
- - string:
- name: 'CHECKPATCH_ROOT_DIR'
- description: 'Directory in archive with all checkpatch variants'
- default: 'checkpatch'
- - string:
- name: 'FPGA_ROOT_DIR'
- description: 'Directory in archive with all fpga images'
- default: 'fpga'
- - string:
- name: 'FASTMODEL_ROOT_DIR'
- description: 'Directory in archive with all fastmodel variants'
- default: 'fastmodel'
- - string:
- name: 'DEPENDENCIES_ARCHIVE_NAME_ZIP'
- description: 'Archive name of dependencies artifacts'
- default: 'tfm_dependencies.zip'
- - string:
- name: 'DEPENDENCIES_CONTENTS_FILE'
- description: 'Artifact name for contents of archive'
- default: 'dependencies_checkout_contents.json'
- - string:
- name: 'MAINTAINER_EMAIL'
- description: 'Person to be notified of failure ( empty = disable )'
- default: ''
- dsl:
- !include-raw:
- - ./pipelines/tf-m-dependencies-checkout