| #!/usr/bin/env groovy |
| |
| /** |
| * Copyright (c) 2018-2020 ARM Limited |
| * SPDX-License-Identifier: BSD-3-Clause |
| * |
| * The following pipeline compiles, checks code style and runs fastmodel |
| * tests for each Gerrit patch on the tracked branch (TF-M master). |
| */ |
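| |
| /* The pipeline expects the following to be supplied by the job |
| configuration and the Gerrit trigger (names taken from their use |
| below): GERRIT_URL, GERRIT_PROJECT, GERRIT_REFSPEC, GERRIT_SCRIPTS, |
| GERRIT_SCRIPTS_REFSP, GERRIT_PATCHSET_REVISION, GERRIT_CHANGE_URL, |
| GERRIT_PATCHSET_UPLOADER_EMAIL, REF_DEP_CHECKOUT_NAME, |
| DEPENDENCIES_ARCHIVE_NAME_ZIP, DEPENDENCIES_CONTENTS_FILE, |
| DEPENDENCIES_CMSIS_TAG, DEPENDENCIES_MBED_CRYPTO_TAG, |
| DEPENDENCIES_CHECKPATCH_TAG, DEPENDENCIES_FPV_TAG, |
| FPGA_IMAGES_TAG_LIST, FASTMODEL_TEST, LAVA_TEST, BUILD_DOCS, |
| BUILD_CONFIG_JSON, BUILD_PARALLEL_NO, BUILD_THREAD_NO, |
| LAVA_TEST_CONFIG_JSON, LAVA_CREDENTIAL_ID, LAVA_URL, SUMMARY_FILE |
| and MAINTAINER_EMAIL. */ |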
| |
| /* Status control variables, one per stage. Each stage sets its |
| variable to SUCCESS as its final step, so the reporting stage can |
| tell which stages completed. */ |
| def checkout_nightlies_job_result = 'FAILURE' |
| def checkout_tfm_job_result = 'FAILURE' |
| def checkout_lava_runner_job_result = 'FAILURE' |
| def checkout_ci_scripts_job_result = 'FAILURE' |
| def cppcheck_job_result = 'FAILURE' |
| def checkpatch_job_result = 'FAILURE' |
| def build_job_result = 'FAILURE' |
| def build_docs_result = 'FAILURE' |
| def artifact_job_result = 'FAILURE' |
| def fastmodel_job_result = 'FAILURE' |
| def lava_job_result = 'FAILURE' |
| def timeout_def_stage = 60 |
| def timeout_build_stage = 180 |
| def timeout_test_stage = 180 |
| /* Variable to store the stage that failed */ |
| def failed_stage = '' |
| |
| /* Define docker images used in pipeline */ |
| def docker_img_orchestrator = 'docker-amd64-xenial' |
| def docker_img_builder = 'docker-amd64-xenial' |
| |
| pipeline { |
| agent { |
| node { |
| label docker_img_orchestrator |
| } |
| } |
| environment { |
| |
| /* Any paths added here are prepended to PATH by the build scripts */ |
| EXTRA_PATHS = '' |
| |
| /* Trusted Firmware checkout directory */ |
| TFM_DIR = "tf-m" |
| |
| /* Trusted Firmware CI Script checkout directory */ |
| TFM_CI_SCR_DIR = "tfm-ci-scripts" |
| |
| /* Python 2/3 virtual environment names */ |
| VENV_P2_NAME = "tfm-openci-python2-venv" |
| VENV_P3_NAME = "tfm-openci-python3-venv" |
| |
| /* Check-patch related intermediate text file */ |
| CKPATCH_DETAILS_FILE = "checkpatch_details.txt" |
| |
| /* Check-patch output log file */ |
| CKPATCH_SUMMARY_FILE = "checkpatch_summary.json" |
| |
| /* Cppcheck output log file */ |
| CPPCHCK_SUMMARY_FILE = "cppchk_summary.json" |
| |
| /* File that captures information about checked-out dependencies */ |
| CI_SCRIPTS_INFO_FILE = "openci_scripts_git_info.json" |
| |
| /* Git information log file */ |
| GIT_INFO_FILE = "tfm_git_info.json" |
| |
| /* Build Wrapper output log file */ |
| BUILD_SUMMARY_FILE = "build_summary.json" |
| |
| /* Fast model testing output log file */ |
| FPV_SUMMARY_FILE = "fvp_test_summary.json" |
| |
| /* Intermediate lava job definition file (passed to lava dispatcher) */ |
| LAVA_TEST_DEFINITION = "arm_cm3ds_mps2_gcc_arm.yaml" |
| |
| /* Filename for results file provided by LAVA after testing */ |
| LAVA_JOB_RESULTS = "lava_job_results.yaml" |
| |
| /* lava output log file */ |
| LAVA_SUMMARY_FILE = "lava_summary.json" |
| |
| /* Time the dispatcher will wait before cancelling a job */ |
| LAVA_DISPATCHER_TIMEOUT_MINS = timeout_test_stage.toString() |
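| /* Reusing timeout_test_stage keeps the dispatcher timeout in step |
| with the 'Lava' stage timeout defined further down */ |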
| |
| /* Jenkins environment information log file */ |
| JENKINS_ENV_INFO_FILE = "jenviroment.json" |
| |
| /* Directory where build artifacts are stored */ |
| BUILD_LOCATION = "build-ci-all" |
| |
| /* Directory where documentation is stored */ |
| BUILD_DOC_LOCATION = "build-docs" |
| |
| /* Directory to store model test logs */ |
| TEST_LOGS="model_tests_logs" |
| |
| /* Pipeline-specific directory references for internal use (stashing) */ |
| FPV_WORK_DIR="FVP_MPS2" |
| CMSIS_WORK_DIR="CMSIS_5" |
| MBED_CRYPTO_WORK_DIR="mbed-crypto" |
| CHECKPATCH_WORK_DIR="checkpatch" |
| |
| /* The ARMLMD_LICENSE_FILE environment variable needs to be present |
| for the ARMCLANG compiler. Ideally it is set from the Jenkins |
| environment. The product path depends on the build slave and should |
| be set in the pipeline */ |
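| /* Illustrative only (not set by this pipeline): a typical FlexLM |
| value would be ARMLMD_LICENSE_FILE=7010@license-server, exported |
| in the node environment */ |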
| ARM_PRODUCT_PATH = '/home/buildslave/tools/sw/mappings' |
| ARM_TOOL_VARIANT = 'ult' |
| } |
| stages { |
| stage('ci scripts') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_def_stage, unit:'MINUTES') |
| } |
| steps { |
| script { |
| failed_stage = 'ci scripts' |
| } |
| checkout changelog: false, poll: false, scm: [ |
| $class: 'GitSCM', |
| branches: [[name: '${GERRIT_SCRIPTS_REFSP}']], |
| doGenerateSubmoduleConfigurations: false, |
| extensions: [ |
| [ |
| $class: 'SubmoduleOption', |
| disableSubmodules: false, |
| parentCredentials: false, |
| recursiveSubmodules: true, |
| reference: '', |
| trackingSubmodules: false |
| ], |
| [ |
| $class: 'CloneOption', |
| shallow: true |
| ], |
| [ |
| $class: |
| 'RelativeTargetDirectory', |
| relativeTargetDir: |
| "${TFM_CI_SCR_DIR}" |
| ] |
| ], |
| submoduleCfg: [], |
| userRemoteConfigs: [[ |
| url: ("${GERRIT_URL}/" |
| + "${GERRIT_SCRIPTS}"), |
| refspec: ("refs/changes/*:" |
| + "refs/changes/*") |
| ]] |
| ] |
| |
| /* Capture git information before stashing */ |
| sh '''#!/bin/bash |
| set -e |
| # Capture the git information |
| python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --collect --git-info ${TFM_CI_SCR_DIR} \\ |
| --output-file ${CI_SCRIPTS_INFO_FILE} |
| '''.stripIndent() |
| |
| /* Stash the CI scripts so later stages (which may run on |
| different nodes) can unstash them */ |
| stash includes: "${TFM_CI_SCR_DIR}/**/*", name: 'ci-scripts' |
| stash includes: "${CI_SCRIPTS_INFO_FILE}", |
| name: 'ci_scripts_git_info' |
| /* Always finish by setting the stage as SUCCESS */ |
| script { |
| checkout_ci_scripts_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: ci scripts" |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| stage('Dependencies Checkout') { |
| parallel { |
| stage('Github (Nightlies)') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_def_stage, unit:'MINUTES') |
| } |
| steps { |
| script { |
| failed_stage = 'Github (Nightlies)' |
| } |
| /* Unstash the components from previous stages */ |
| unstash 'ci-scripts' |
| |
| /* Retrieve and extract artifacts from Nightly Job */ |
| copyArtifacts projectName: "${REF_DEP_CHECKOUT_NAME}" |
| unzip zipFile: "${DEPENDENCIES_ARCHIVE_NAME_ZIP}", |
| quiet: true |
| |
| sh '''#!/bin/bash |
| set -e |
| # Sanity check: fail if the contents file is missing |
| if [ ! -f ${DEPENDENCIES_CONTENTS_FILE} ]; then |
| echo "Missing ${DEPENDENCIES_CONTENTS_FILE}, \\ |
| please check nightly checkout job." |
| exit 1 |
| else |
| echo "Parsing dependencies" |
| CMSIS_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} | \\ |
| jq -r ".[\\"cmsis\\"]\\ |
| [\\"${DEPENDENCIES_CMSIS_TAG}\\"][\\"dir\\"]") |
| |
| MBEDCRYPTO_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} \\ |
| | jq -r ".[\\"mbedcrypto\\"]\\ |
| [\\"${DEPENDENCIES_MBED_CRYPTO_TAG}\\"][\\"dir\\"]") |
| CHECKPATH_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} \\ |
| | jq -r ".[\\"checkpatch\\"]\\ |
| [\\"${DEPENDENCIES_CHECKPATCH_TAG}\\"][\\"dir\\"]") |
| |
| fi |
| |
| # Check that provided input tags exist |
| |
| # Create a labeled array for user selections |
| USER_TAG_LIST=(CHECKPATCH:"${DEPENDENCIES_CHECKPATCH_TAG}" \\ |
| MBED_CRYPTO:"${DEPENDENCIES_MBED_CRYPTO_TAG}" \\ |
| CMSIS:"${DEPENDENCIES_CMSIS_TAG}") |
| |
| # Get extracted data from DEPENDENCIES_CONTENTS_FILE. |
| USER_SEL_LIST="$CHECKPATH_DIR $MBEDCRYPTO_DIR" |
| USER_SEL_LIST="$USER_SEL_LIST $CMSIS_DIR" |
| IFS=' ' read -ra USR_SEL_LIST <<< "${USER_SEL_LIST}" |
| |
| TAG_COUNTER=0 |
| # Look for data not found in contents (jq out -> null) |
| for USR_SEL in "${USR_SEL_LIST[@]}"; do |
| echo "$USR_SEL > $TAG_COUNTER" |
| if [ "$USR_SEL" == "null" ]; then |
| |
| echo "Dependencies ERROR. "\\ |
| "TAG: '${USER_TAG_LIST[TAG_COUNTER]}' "\\ |
| "does not exist in checkout job." |
| exit 1 |
| fi |
| ((TAG_COUNTER+=1)) |
| done |
| |
| # FIX for Jenkins unzip clearing permissions |
| chmod +x $CHECKPATH_DIR/checkpatch.pl |
| |
| # Move the checked-out data to user-set directories |
| mv $CMSIS_DIR $CMSIS_WORK_DIR |
| |
| # When the directory name from nightlies matches the |
| # expected work dir, the non-selected entries must be |
| # removed |
| mv $MBEDCRYPTO_DIR mbedcrypto_tmp |
| rm -rf mbed-crypto |
| mv $CHECKPATH_DIR checkpatch_tmp && rm -rf checkpatch |
| mv checkpatch_tmp $CHECKPATCH_WORK_DIR |
| mv mbedcrypto_tmp $MBED_CRYPTO_WORK_DIR |
| |
| |
| CONTENTS="cmsis=${DEPENDENCIES_CMSIS_TAG} \\ |
| mbedcrypto=${DEPENDENCIES_MBED_CRYPTO_TAG} \\ |
| checkpatch=${DEPENDENCIES_CHECKPATCH_TAG}" \\ |
| |
| if [ "${LAVA_TEST}" == "Enable" ]; then |
| echo "Fetching LAVA Dependancies" |
| # Prepare the FPGA image files |
| # Remove whitespace between commas |
| FPGA_IMAGES_TAG_LIST=$(echo ${FPGA_IMAGES_TAG_LIST} \\ |
| | sed 's/[[:space:]]*,[[:space:]]*/,/g') |
| # Split the string using the comma. |
| IFS=',' read -ra FPGA_IMAGES <<< \\ |
| "${FPGA_IMAGES_TAG_LIST}" |
| |
| # Copy each tagged FPGA recovery image |
| for FPGA_NAME in "${FPGA_IMAGES[@]}"; do |
| # Strip whitespace |
| FPGA_NAME=$(echo $FPGA_NAME | xargs) |
| |
| FPGA_IMAGE_F=$(cat \\ |
| ${DEPENDENCIES_CONTENTS_FILE} | \\ |
| jq -r ".[\\"fpga\\"]\\ |
| [\\"${FPGA_NAME}\\"][\\"recovery\\"]") |
| |
| if [ "$FPGA_IMAGE_F" == "null" ]; then |
| echo "Dependencies ERROR. "\\ |
| "FPGA TAG: '${FPGA_NAME}' does not "\\ |
| "exist in checkout job." |
| exit 1 |
| else |
| echo "Proccessing FPGA Image: ${FPGA_IMAGE_F}" |
| cp ${FPGA_IMAGE_F} ./ |
| fi |
| done |
| CONTENTS="${CONTENTS} fpga=${FPGA_IMAGES_TAG_LIST}" |
| fi |
| |
| if [ "${FASTMODEL_TEST}" == "Enable" ]; then |
| echo "Fetching Fastmodel Dependancies" |
| # Copy the selected Fastmodel directory |
| FPV_DIR=$(cat ${DEPENDENCIES_CONTENTS_FILE} | \\ |
| jq -r ".[\\"fastmodel\\"]\\ |
| [\\"${DEPENDENCIES_FPV_TAG}\\"][\\"dir\\"]") |
| |
| if [ "$FPV_DIR" == "null" ]; then |
| echo "Dependencies ERROR. "\\ |
| "FASTMODEL TAG: '${DEPENDENCIES_FPV_TAG}'" \\ |
| "does not exist in checkout job." |
| exit 1 |
| fi |
| # FIX for Jenkins unzip clearing permissions |
| chmod +x $FPV_DIR/FVP_MPS2_AEMv8M |
| |
| mv $FPV_DIR $FPV_WORK_DIR |
| |
| # Store the user selection to report |
| CONTENTS="${CONTENTS} fastmodel=${DEPENDENCIES_FPV_TAG}" |
| fi |
| |
| # Create a summary of the versions selected by job parameters |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --report ${DEPENDENCIES_CONTENTS_FILE} \\ |
| --filter-report ${CONTENTS} \\ |
| --output-file ${DEPENDENCIES_CONTENTS_FILE} |
| '''.stripIndent() |
| |
| stash includes: "${CMSIS_WORK_DIR}/**/*", |
| name: 'cmsis' |
| |
| stash includes: |
| "${CHECKPATCH_WORK_DIR}/**/*", |
| name: 'checkpatch' |
| |
| stash includes: |
| "${MBED_CRYPTO_WORK_DIR}/**/*", |
| name: 'mbedcrypto' |
| |
| stash includes: |
| "${DEPENDENCIES_CONTENTS_FILE}", |
| name: 'nightlies_info' |
| |
| script { |
| if (params.FASTMODEL_TEST.equals('Enable')) { |
| stash includes: "${FPV_WORK_DIR}/**/*", |
| name: 'fvp_binaries' |
| } |
| if (params.LAVA_TEST.equals('Enable')) { |
| archiveArtifacts artifacts: |
| "*.tar.gz", |
| onlyIfSuccessful: false |
| } |
| } /* script */ |
| |
| /* Always finish by setting the stage as SUCCESS */ |
| script { |
| checkout_nightlies_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Github (Nightlies)" |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| |
| stage('tf-m') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_def_stage, unit:'MINUTES') |
| } |
| steps { |
| script { |
| failed_stage = 'Checkout Trusted Firmware-M' |
| } |
| /* Unstash the components from previous stages */ |
| unstash 'ci-scripts' |
| |
| checkout changelog: true, poll: false, scm: [ |
| $class: 'GitSCM', |
| branches: [[name: '${GERRIT_REFSPEC}']], |
| doGenerateSubmoduleConfigurations: false, |
| extensions: [ |
| [ |
| $class: 'SubmoduleOption', |
| disableSubmodules: false, |
| parentCredentials: false, |
| recursiveSubmodules: true, |
| reference: '', |
| trackingSubmodules: false |
| ], |
| [ |
| $class: 'RelativeTargetDirectory', |
| relativeTargetDir: "${TFM_DIR}" |
| ] |
| ], |
| submoduleCfg: [], |
| userRemoteConfigs: [[ |
| url: ("${GERRIT_URL}/" |
| + "${GERRIT_PROJECT}"), |
| refspec: ("refs/changes/*:" |
| + "refs/changes/*") |
| ]] |
| ] |
| sh '''#!/bin/bash |
| set -e |
| # Capture the git information |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --collect --git-info ${TFM_DIR} \\ |
| --output-file ${GIT_INFO_FILE} |
| |
| cat ${GIT_INFO_FILE} |
| '''.stripIndent() |
| |
| stash useDefaultExcludes: false, name: "${TFM_DIR}" |
| stash includes: ("${GIT_INFO_FILE}"), name: 'git_info' |
| |
| /* Always finish by setting the stage as SUCCESS */ |
| script { |
| checkout_tfm_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Checkout Trusted Firmware-M" |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| |
| } /* stage */ |
| |
| } /* parallel */ |
| } /* stage */ |
| |
| stage('Static Code Analysis') { |
| parallel { |
| stage('Cppcheck') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_def_stage, unit:'MINUTES') |
| } |
| steps { |
| /* Unstash the components from previous stages */ |
| unstash 'ci-scripts' |
| unstash 'cmsis' |
| unstash 'mbedcrypto' |
| unstash 'tf-m' |
| |
| /* Run cppcheck */ |
| sh '''#!/bin/bash |
| set -e |
| export PATH=$EXTRA_PATHS:$PATH |
| |
| # Run cpp-check |
| pushd ${TFM_DIR} |
| if [ -n "${GERRIT_PATCHSET_REVISION}" ]; then |
| # Run CppCheck against the modified files |
| echo CppCheck against change-set |
| ../${TFM_CI_SCR_DIR}/run-cppcheck.sh HEAD |
| else |
| # Run CppCheck against the full code-base |
| echo CppCheck against base |
| ../${TFM_CI_SCR_DIR}/run-cppcheck.sh |
| fi |
| popd |
| |
| # Parse the output into a report |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py\\ |
| --collect --cpp-check-xml \\ |
| ${TFM_DIR}/build-cppcheck/chk-config.xml \\ |
| ${TFM_DIR}/build-cppcheck/chk-src.xml \\ |
| --output-file ${CPPCHCK_SUMMARY_FILE} |
| |
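| # Join the two cppcheck XML reports into one file for the HTML |
| # report: keep chk-config.xml minus its last two lines (the |
| # closing </errors> and </results> tags) and append chk-src.xml |
| # from line 5 onward, skipping its XML prolog and opening tags. |
| # This assumes cppcheck's version-2 XML layout. |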
| head -n -2 ${TFM_DIR}/build-cppcheck/chk-config.xml \\ |
| > cpp-join.xml |
| tail -n +5 ${TFM_DIR}/build-cppcheck/chk-src.xml \\ |
| >> cpp-join.xml |
| |
| # Create the HTML report |
| cppcheck-htmlreport --source-encoding="iso8859-1" \\ |
| --title="TFM Cppcheck Report" --source-dir=./ \\ |
| --report-dir=./build-cppcheck-htmlreport/ \\ |
| --file=./cpp-join.xml |
| '''.stripIndent() |
| |
| stash includes: |
| ("${CPPCHCK_SUMMARY_FILE}," |
| + "${TFM_DIR}/build-cppcheck/chk-config.xml," |
| + "${TFM_DIR}/build-cppcheck/chk-src.xml"), |
| name: 'cppcheck_summary' |
| |
| /* Archive reports */ |
| zip dir: "build-cppcheck-htmlreport", |
| zipFile: 'cppcheck-htmlreport.zip', |
| archive: true |
| |
| archiveArtifacts artifacts: |
| "${CPPCHCK_SUMMARY_FILE}", |
| onlyIfSuccessful: false |
| |
| sh '''#!/bin/bash |
| set -e |
| # Do not fail the pipeline if errors are detected. Generate |
| # a trigger file which is used to notify the user |
| CPPCHECK_EVAL=$(python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --all-fields-must-pass \\ |
| --set-success-field 'success = True' \\ |
| --report ${CPPCHCK_SUMMARY_FILE} \\ |
| | tail -n 1) |
| |
| if [ "$CPPCHECK_EVAL" == "Evaluation failed" ]; then |
| echo "Cppcheck analysis FAILED" |
| touch cppcheck.eval_failed |
| fi |
| '''.stripIndent() |
| |
| /* Send an email to the uploader if the evaluation failed */ |
| script { |
| if (fileExists('cppcheck.eval_failed')) { |
| if (env.GERRIT_PATCHSET_UPLOADER_EMAIL) { |
| emailext ( |
| subject: ("Job '${env.JOB_NAME}'build " |
| + "${env.BUILD_NUMBER} has " |
| + "failed Cppcheck check."), |
| body: ("Gerrit Change: " |
| + "${env.GERRIT_CHANGE_URL}.\n" |
| + "Please Check console output " |
| + " at: ${env.BUILD_URL}\nor " |
| + "summary report at: " |
| + "${env.BUILD_URL}/artifact/" |
| + "${CPPCHCK_SUMMARY_FILE}"), |
| to: "${env.GERRIT_PATCHSET_UPLOADER_EMAIL}", |
| ) |
| } |
| } |
| } /* script */ |
| /* Always finish by setting the stage as SUCCESS */ |
| script { |
| cppcheck_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Cppcheck" |
| cleanWs() |
| } /* always */ |
| failure { |
| script { |
| failed_stage = 'Cppcheck' |
| } |
| } |
| } /* post */ |
| } /* stage */ |
| |
| stage('check-patch') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_def_stage, unit:'MINUTES') |
| } |
| steps { |
| |
| /* Unstash the components from previous stages */ |
| unstash 'tf-m' |
| unstash 'ci-scripts' |
| unstash 'checkpatch' |
| |
| // Run check-patch |
| sh '''#!/bin/bash |
| set -e |
| export PATH=$EXTRA_PATHS:$PATH |
| |
| # Run checkpatch |
| pushd ${TFM_DIR} |
| |
| if [ -n "${GERRIT_PATCHSET_REVISION}" ]; then |
| # Run Checkpatch against the modified files |
| echo Checkpatch against change-set |
| ../${TFM_CI_SCR_DIR}/run-checkpatch.sh \\ |
| -l 1 -f ../${CKPATCH_DETAILS_FILE} \\ |
| -p ../${CHECKPATCH_WORK_DIR} |
| else |
| # Run Checkpatch against the full code-base |
| echo Checkpatch against base |
| ../${TFM_CI_SCR_DIR}/run-checkpatch.sh \\ |
| -l 0 -f ../${CKPATCH_DETAILS_FILE} \\ |
| -p ../${CHECKPATCH_WORK_DIR} |
| fi |
| popd |
| |
| # Create the report |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --collect \\ |
| --checkpatch-parse-f ${CKPATCH_DETAILS_FILE} \\ |
| --output-file ${CKPATCH_SUMMARY_FILE} |
| '''.stripIndent() |
| |
| archiveArtifacts artifacts: |
| "${CKPATCH_SUMMARY_FILE}", |
| onlyIfSuccessful: false |
| |
| stash includes: |
| "${CKPATCH_SUMMARY_FILE}", |
| name: 'checkpatch_summary' |
| |
| sh '''#!/bin/bash |
| set -e |
| # Do not fail the pipeline if errors are detected. Generate |
| # a trigger file which is used to notify the user |
| CHECKPATCH_EVAL=$(python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --all-fields-must-pass \\ |
| --set-success-field 'success = True' \\ |
| --report ${CKPATCH_SUMMARY_FILE} \\ |
| | tail -n 1) |
| |
| if [ "$CHECPATCH_EVAL" == "Evaluation failed" ]; then |
| echo "Checkpatch analysis FAILED" |
| touch checkpatch.eval_failed |
| fi |
| '''.stripIndent() |
| |
| /* Send an email to the uploader if the evaluation failed */ |
| script { |
| if (fileExists('checkpatch.eval_failed')) { |
| if (env.GERRIT_PATCHSET_UPLOADER_EMAIL) { |
| emailext ( |
| subject: ("Job '${env.JOB_NAME}'build " |
| + "${env.BUILD_NUMBER} has " |
| + "failed Checkpatch check."), |
| body: ("Gerrit Change: " |
| + "${env.GERRIT_CHANGE_URL}.\n" |
| + "Please Check console output " |
| + "at: ${env.BUILD_URL}\nor " |
| + "summary report at: " |
| + "${env.BUILD_URL}/artifact/" |
| + "${CKPATCH_SUMMARY_FILE}"), |
| to: "${env.GERRIT_PATCHSET_UPLOADER_EMAIL}", |
| ) |
| } |
| } |
| } /* script */ |
| |
| /* Always finish by setting the stage as SUCCESS */ |
| script { |
| checkpatch_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: check-patch" |
| cleanWs() |
| } /* always */ |
| failure { |
| script { |
| failed_stage = 'check-patch' |
| } |
| } |
| } /* post */ |
| } /* stage */ |
| } /* parallel */ |
| } /* stage */ |
| |
| stage('Build') { |
| parallel { |
| stage('Configurations') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_build_stage, unit:'MINUTES') |
| } |
| steps { |
| script { |
| failed_stage = 'Build-Configurations' |
| } |
| /* Unstash the components from previous stages */ |
| unstash 'mbedcrypto' |
| unstash 'cmsis' |
| unstash 'tf-m' |
| unstash 'ci-scripts' |
| |
| // Run the build and generate a summary report file. |
| sh '''#!/bin/bash |
| set -e |
| export PATH=$EXTRA_PATHS:$PATH |
| |
| # Use build info from the config if available. |
| if [ -n "${BUILD_CONFIG_JSON}" ]; then |
| echo "Overriding build config with user provided one" |
| echo ${BUILD_CONFIG_JSON} > build_config.json |
| B_CONFIG="-f build_config.json" |
| else |
| B_CONFIG="--config full --install" |
| fi |
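| |
| # Note: when BUILD_CONFIG_JSON is set, its contents are written |
| # verbatim to build_config.json and passed to build_helper.py via |
| # -f; the expected JSON schema is the one build_helper.py defines. |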
| |
| # Build the project combinations |
| python3 ${TFM_CI_SCR_DIR}/build_helper/build_helper.py \\ |
| -b ${BUILD_LOCATION} -s -l \\ |
| -p ${BUILD_PARALLEL_NO} -n ${BUILD_THREAD_NO} \\ |
| -r ${BUILD_SUMMARY_FILE} ${B_CONFIG} |
| '''.stripIndent() |
| |
| /* Archive artifacts before evaluating report. */ |
| archiveArtifacts artifacts: ("${BUILD_LOCATION}/" + |
| "*_*_*_*_*/install/**/*"), |
| excludes: '**/*.map', |
| onlyIfSuccessful: true |
| |
| /* Stash binaries to be used by nightly test jobs. */ |
| stash includes: ("${BUILD_LOCATION}/*_*_*_*_*/" + |
| "install/outputs/**/*.*"), |
| name: 'build_binaries' |
| |
| /* Stash summary to be used by upstream project. */ |
| stash includes: "${BUILD_SUMMARY_FILE}", |
| name: 'build_summary' |
| archiveArtifacts artifacts: "${BUILD_SUMMARY_FILE}" |
| |
| sh '''#!/bin/bash |
| set -e |
| # Parse the output report (exits with 1 if the build failed) |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --all-fields-must-pass --error_if_failed \\ |
| --report ${BUILD_SUMMARY_FILE} |
| '''.stripIndent() |
| |
| script { |
| build_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Build" |
| /* Save build logs */ |
| archiveArtifacts artifacts: "build-ci-all/*.log", |
| onlyIfSuccessful: false |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| |
| stage('Documentation') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_build_stage, unit:'MINUTES') |
| } |
| when { |
| allOf { |
| environment name: 'BUILD_DOCS', value: 'Enable' |
| } |
| } |
| steps { |
| script { |
| failed_stage = 'Build-Documentation' |
| } |
| /* Unstash the components from previous stages */ |
| unstash 'mbedcrypto' |
| unstash 'cmsis' |
| unstash 'tf-m' |
| unstash 'ci-scripts' |
| |
| // Build the documentation and generate its artifacts. |
| sh '''#!/bin/bash |
| set -e |
| export PATH=$EXTRA_PATHS:$PATH |
| |
| # Build the documentation |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/build_helper/build_helper.py \\ |
| -b ${BUILD_DOC_LOCATION} -l --config doxygen |
| '''.stripIndent() |
| |
| archiveArtifacts artifacts: ("${BUILD_DOC_LOCATION}/" + |
| "*/install/doc/**/*"), |
| onlyIfSuccessful: true |
| |
| script { |
| build_docs_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Build Documentation" |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| |
| } /* parallel */ |
| } /* stage */ |
| |
| stage('Tests') { |
| parallel { |
| stage('Fastmodel') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: 60, unit:'MINUTES') |
| } |
| when { |
| allOf { |
| environment name: 'FASTMODEL_TEST', value: 'Enable' |
| } |
| } |
| steps { |
| /* Unstash the components from previous stages */ |
| unstash 'ci-scripts' |
| unstash 'fvp_binaries' |
| unstash 'build_binaries' |
| unstash 'build_summary' |
| |
| script { |
| failed_stage = 'Fast Model Testing' |
| } |
| sh '''#!/bin/bash |
| set -e |
| python3 -u \\ |
| ${TFM_CI_SCR_DIR}/fastmodel_dispatcher/fastmodel_dispatcher.py \\ |
| --build_report ${BUILD_SUMMARY_FILE} \\ |
| --report ${FPV_SUMMARY_FILE} |
| |
| |
| # Collect the model test logs |
| mkdir -p ${TEST_LOGS} |
| mv terminal*.log ${TEST_LOGS}/ |
| '''.stripIndent() |
| |
| stash includes: "${FPV_SUMMARY_FILE}", |
| name: 'fastmodel_summary' |
| archiveArtifacts artifacts: "${FPV_SUMMARY_FILE}" |
| archiveArtifacts artifacts: "${TEST_LOGS}/*" |
| |
| sh '''#!/bin/bash |
| set -e |
| # Parse the report |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --all-fields-must-pass --error_if_failed \\ |
| --set-success-field 'success = True' \\ |
| --report ${FPV_SUMMARY_FILE} |
| '''.stripIndent() |
| script { |
| fastmodel_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Fastmodel Tests" |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| |
| stage('Lava') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| environment { |
| /* Authentication token provided by the Jenkins credentials store */ |
| LAVA_CREDENTIAL = credentials("${LAVA_CREDENTIAL_ID}") |
| |
| /* The credentials() helper exposes separate _USR/_PSW variables. |
| Assign them to more commonly used names */ |
| LAVA_USER="${LAVA_CREDENTIAL_USR}" |
| LAVA_TOKEN="${LAVA_CREDENTIAL_PSW}" |
| |
| } |
| options { |
| timeout(time: timeout_test_stage, unit:'MINUTES') |
| } |
| when { |
| allOf { |
| environment name: 'LAVA_TEST', value: 'Enable' |
| } |
| } |
| steps { |
| script { |
| failed_stage = 'Lava test' |
| } |
| unstash 'ci-scripts' |
| |
| sh '''#!/bin/bash |
| set -e |
| # If no config is provided, the helper uses the built-in one |
| T_CONFIG="" |
| if [ -n "${LAVA_TEST_CONFIG_JSON}" ]; then |
| echo "Overriding test config." |
| echo ${LAVA_TEST_CONFIG_JSON} > test_config.json |
| T_CONFIG="--config-file test_config.json" |
| fi |
| |
| # Generate the lava definition file |
| python3 -u \\ |
| ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\ |
| --task-create-definition \\ |
| --create-definition-build-no ${BUILD_NUMBER} \\ |
| --override-jenkins-url ${JENKINS_URL} \\ |
| --override-jenkins-job ${JOB_NAME} \\ |
| --create-definition-output-file \\ |
| "${LAVA_TEST_DEFINITION}" \\ |
| ${T_CONFIG} |
| |
| # Do not submit invalid jobs |
| if [ ! -f "${LAVA_TEST_DEFINITION}" ]; then |
| echo "LAVA job file does not exist: ${JOB}" |
| exit -1 |
| fi |
| |
| # Convert timeout to seconds |
| DTOUT=$((${LAVA_DISPATCHER_TIMEOUT_MINS} * 60)) |
| |
| # Submit the job to LAVA |
| python3 -u \\ |
| ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\ |
| --task-dispatch "$LAVA_TEST_DEFINITION" \\ |
| --dispatch-timeout ${DTOUT} \\ |
| --dispatch-lava-url ${LAVA_URL} \\ |
| --dispatch-lava-job-results-file \\ |
| ${LAVA_JOB_RESULTS} \\ |
| --dispatch-lava_token_from_environ |
| |
| # If tokens are not provided via withCredentials bindings |
| # they can be set manually with: |
| # --dispatch-lava_token_usr |
| # --dispatch-lava_token_secret |
| |
| # Verify the results and store them into a summary |
| python3 \\ |
| ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\ |
| --task-lava-parse ${LAVA_JOB_RESULTS} \\ |
| --output-report "${LAVA_SUMMARY_FILE}" |
| '''.stripIndent() |
| |
| archiveArtifacts artifacts: |
| "${LAVA_JOB_RESULTS}," + |
| "${LAVA_TEST_DEFINITION}," + |
| "${LAVA_SUMMARY_FILE}", |
| onlyIfSuccessful: false |
| |
| stash includes: |
| ("${LAVA_SUMMARY_FILE}," |
| + "${LAVA_JOB_RESULTS}," |
| + "${LAVA_TEST_DEFINITION}"), |
| name: 'lava_summary' |
| |
| sh '''#!/bin/bash |
| set -e |
| # Parse the output report (exits with 1 if the tests failed) |
| python3 \\ |
| ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --all-fields-must-pass --error_if_failed \\ |
| --set-success-field 'success = True' \\ |
| --report ${LAVA_SUMMARY_FILE} |
| '''.stripIndent() |
| |
| script { |
| lava_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| script { |
| if (currentBuild.result == 'FAILURE') { |
| sh '''#!/bin/bash |
| # If no job has been submitted the file |
| # will not exist |
| if [ ! -f lava_job.id ]; then |
| echo "No LAVA job pending" |
| exit 0 |
| fi |
| |
| # Send the request to cancel the job |
| python3 -u \\ |
| ./${TFM_CI_SCR_DIR}/lava_helper/lava_helper.py \\ |
| --dispatch-lava-url ${LAVA_URL} \\ |
| --dispatch-cancel $(cat lava_job.id) \\ |
| --dispatch-lava_token_from_environ |
| '''.stripIndent() |
| |
| } else { |
| echo "POST: Lava test passed" |
| } |
| |
| cleanWs() |
| } /* script */ |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| } /* parallel */ |
| } /* stage */ |
| |
| stage('Process Artifacts & Report') { |
| agent { |
| node { |
| label docker_img_builder |
| } |
| } |
| options { |
| timeout(time: timeout_def_stage, unit:'MINUTES') |
| } |
| steps { |
| unstash 'ci_scripts_git_info' |
| unstash 'nightlies_info' |
| unstash 'git_info' |
| unstash 'build_summary' |
| unstash 'checkpatch_summary' |
| unstash 'cppcheck_summary' |
| |
| script { |
| failed_stage = 'Process Artifacts & Report' |
| |
| if (params.FASTMODEL_TEST.equals('Enable')) { |
| unstash 'fastmodel_summary' |
| } |
| if (params.LAVA_TEST.equals('Enable')) { |
| unstash 'lava_summary' |
| } |
| } /* script */ |
| // Create the build result summary to be posted to Gerrit. |
| // It concatenates the summaries collected from the previous |
| // stages with a header inserted in between. |
| |
| /* Unstash the components from previous stages */ |
| unstash 'ci-scripts' |
| |
| sh '''#!/bin/bash |
| set -e |
| # Collect environment information |
| python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --collect --jenkins-info \\ |
| --output-file ${JENKINS_ENV_INFO_FILE} |
| |
| # Merge all the reports into a summary |
| REPORT_LIST="cppcheck=${CPPCHCK_SUMMARY_FILE} \\ |
| checkpatch=${CKPATCH_SUMMARY_FILE} \\ |
| build=${BUILD_SUMMARY_FILE} \\ |
| tf-m=${GIT_INFO_FILE} \\ |
| ci-scripts=${CI_SCRIPTS_INFO_FILE} \\ |
| dependencies-info=${DEPENDENCIES_CONTENTS_FILE} \\ |
| jenkins-info=${JENKINS_ENV_INFO_FILE}" |
| |
| if [ -f "${LAVA_SUMMARY_FILE}" ]; then |
| REPORT_LIST="${REPORT_LIST} lava=${LAVA_SUMMARY_FILE}" |
| fi |
| |
| if [ -f "${FPV_SUMMARY_FILE}" ]; then |
| REPORT_LIST="${REPORT_LIST} fastmodel=${FPV_SUMMARY_FILE}" |
| fi |
| |
| python3 ${TFM_CI_SCR_DIR}/report_parser/report_parser.py \\ |
| --collate-report $REPORT_LIST \\ |
| --output-file ${SUMMARY_FILE} |
| '''.stripIndent() |
| |
| /* Archive the summary to make it available later. */ |
| archiveArtifacts artifacts: "${SUMMARY_FILE}" |
| |
| script { |
| def test_pass = false |
| |
| if (cppcheck_job_result == "SUCCESS" && \ |
| checkpatch_job_result == "SUCCESS" && \ |
| build_job_result == "SUCCESS") { |
| |
| /* If a test is enabled but has failed, the overall |
| build has failed */ |
| if (params.FASTMODEL_TEST.equals('Enable') && \ |
| fastmodel_job_result == "FAILURE") { |
| echo "model testing failed" |
| currentBuild.result = "FAILURE" |
| } |
| else if (params.LAVA_TEST.equals('Enable') && \ |
| lava_job_result == "FAILURE") { |
| echo "LAVA testing failed" |
| currentBuild.result = "FAILURE" |
| } |
| else { |
| echo "All jobs have been successfull" |
| } |
| } else { |
| echo "One or more jobs have failed" |
| echo "Cppcheck status:" |
| echo cppcheck_job_result |
| echo "Checkpatch status:" |
| echo checkpatch_job_result |
| echo "Build status:" |
| echo build_job_result |
| echo "Model Test status:" |
| echo fastmodel_job_result |
| echo "LAVA Test status:" |
| echo lava_job_result |
| currentBuild.result = "FAILURE" |
| } |
| |
| artifact_job_result = 'SUCCESS' |
| failed_stage = '' |
| } |
| } /* steps */ |
| post { |
| always { |
| echo "POST: Proccess Artifacts & Report" |
| cleanWs() |
| } /* always */ |
| } /* post */ |
| } /* stage */ |
| } /* stages */ |
| post { |
| always { |
| cleanWs() |
| } /* always */ |
| failure { |
| script { |
| if (!(params.MAINTAINER_EMAIL.equals(''))) { |
| emailext ( |
| subject: ("Job '${env.JOB_NAME}'build " |
| + "${env.BUILD_NUMBER}, failed at stage: " |
| + "'${failed_stage}'"), |
| body: "Check console output at ${env.BUILD_URL}", |
| to: '${MAINTAINER_EMAIL}', |
| recipientProviders: [ |
| [$class: ('CulpritsRecipient' |
| + 'Provider')], |
| [$class: ('RequesterRecipient' |
| + 'Provider')] |
| ] |
| ) |
| } |
| } /* script */ |
| } /* failure */ |
| aborted { |
| script { |
| if (!(params.MAINTAINER_EMAIL.equals(''))) { |
| emailext ( |
| subject: ("Job '${env.JOB_NAME}'build " |
| + "${env.BUILD_NUMBER}, cancelled at stage: " |
| + "'${failed_stage}'"), |
| body: "Check console output at ${env.BUILD_URL}", |
| to: '${MAINTAINER_EMAIL}', |
| recipientProviders: [ |
| [$class: ('CulpritsRecipient' |
| + 'Provider')], |
| [$class: ('RequesterRecipient' |
| + 'Provider')] |
| ] |
| ) |
| } |
| } /* script */ |
| } /* aborted */ |
| } /* post */ |
| } /* pipeline */ |