#!/usr/bin/env bash
#
# Copyright (c) 2022 Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#

set -euo pipefail

# Positional arguments: parent build URL, sub-job name and test suite filter.
sub_build_url="${1:?usage: ${0} <sub-build-url> [job-name] [filter]}"
job_name="${2:-"tf-a-ci-gateway"}"
filter=${3:-".*"}

# Base Jenkins URL, i.e. everything before the first "/job" path component.
jenkins="${sub_build_url%%/job*}"
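
# For illustration, with a hypothetical URL such as
#   https://ci.trustedfirmware.org/job/tf-a-main/42/
# the expansions above and below give:
#   jenkins="https://ci.trustedfirmware.org"
#   job_target="tf-a-main"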

# Use default output file paths if none are provided.
job_target="$(dirname "${sub_build_url#*/job/}")"
PNGFILE="${PNGFILE:-${job_target}-result.png}"
CSVFILE="${CSVFILE:-${job_target}-result.csv}"

# Truncate the CSV output file so results are appended to an empty file.
: > "${CSVFILE}"

# Collect the URLs of all sub-builds spawned by the given job.
readarray -t sub_builds < <(curl -sSL "${sub_build_url}/api/json" \
    | jq -Rr --arg job_name "${job_name}" '
        fromjson? | [
            .subBuilds[]? | select(.jobName == $job_name) | .url
        ] | .[]')
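
# The jq filter above assumes a multijob build whose API response carries a
# "subBuilds" list, e.g. (illustrative only):
#   { "subBuilds": [ { "jobName": "tf-a-ci-gateway",
#                      "url": "job/tf-a-ci-gateway/100/" } ] }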

# Build a single brace-globbed URL covering every sub-build's /api/json
# endpoint.
report_rel_paths_url="${jenkins}/{$(IFS=,; echo "${sub_builds[*]}")}/api/json"
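
# curl expands the {a,b,c} glob itself, fetching every sub-build's JSON with a
# single invocation, e.g. (hypothetical):
#   curl "${jenkins}/{job/tf-a-ci-gateway/100/,job/tf-a-ci-gateway/101/}/api/json"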

# Get the relative path to each sub-build's report.json, or "-" if there is
# none. The first sed strips the "--_curl_--" markers curl may emit between
# documents; the second prefixes each document with an ASCII record separator
# so that jq --seq can parse the concatenated responses.
readarray -t report_paths < <(curl -fsSL --fail-early "${report_rel_paths_url}" \
    | sed 's/--_curl_--.*$//' \
    | sed -e 's/^{/'$(printf "\x1e")'{/' \
    | jq -sr --seq '
        [ .[]
        | [ .artifacts[]?
            | select(.fileName == "report.json")
            | .relativePath ]
        | if length > 0 then .[] else "-" end ]
        | .[]')

# Combine each sub-build URL with the relative path to its "report.json".
# Empty entries were intentionally kept as "-" above so that ${report_paths}
# maps one-to-one onto ${sub_builds}; they are skipped here.
report_urls="$jenkins/{"
for i in "${!sub_builds[@]}"
do
    if [[ "${report_paths[i]}" != "-" ]]
    then
        report_urls="${report_urls}${sub_builds[i]}/artifact/${report_paths[i]},"
    fi
done

# Strip the trailing comma and add the closing brace.
report_urls="${report_urls%,}}"
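
# report_urls is now another curl glob, along the lines of (hypothetical):
#   ${jenkins}/{job/tf-a-ci-gateway/100/artifact/report.json,job/tf-a-ci-gateway/101/artifact/report.json}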

# Get child build information from each report.json: for every test file,
# derive its group and suite name, keyed by "<job>/<child build number>".
readarray -t child_file_list_array < <(curl -sL "${report_urls}" -o - \
    | sed 's/--_curl_--.*$//' \
    | jq -sr --arg FILTER "${filter}" \
    '[.[]
        | .job as $job
        | [ .child_build_numbers?, [(.test_files[]
            | sub("\\.test";"")
            | split("%") as $config
            | { group: $config[1], suite: $config[2]})]]
        | transpose
        | map( {($job + "/" + .[0]) : .[1]} | to_entries )
        | add
        | map(select(.value.suite | test($FILTER; "il")?
            and (endswith("nil")
                or endswith("norun-fip.dummy") | not)))
        | if ( length > 0 )
        then .[] else empty end
        | .value.group, (.value.suite | gsub("\\,nil";"")), .key]
    | .[]')
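
# The filter above assumes report.json entries shaped roughly like this
# (illustrative only, inferred from the fields the filter reads):
#   { "job": "tf-a-builder",
#     "child_build_numbers": [ "12345" ],
#     "test_files": [ "0%group%suite,nil.test" ] }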

# These three arrays should be the same length; values at the same index
# correspond to the same child build. The jq output above is a flat list of
# (group, suite, key) triplets, unpacked here with a stride of three.
declare -a tftf_keys tftf_suite tftf_group

for i in $(seq 0 3 $((${#child_file_list_array[@]}-1))) ; do
    tftf_group+=("${child_file_list_array[$i]}")
    tftf_suite+=("${child_file_list_array[$i+1]}")
    tftf_keys+=("${child_file_list_array[$i+2]}")
done
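
# For example (hypothetical values):
#   child_file_list_array=(fvp suite-a tf-a-builder/1 fvp suite-b tf-a-builder/2)
# unpacks into:
#   tftf_group=(fvp fvp)
#   tftf_suite=(suite-a suite-b)
#   tftf_keys=(tf-a-builder/1 tf-a-builder/2)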

child_output_results_url="${jenkins}/job/{$(IFS=,; echo "${tftf_keys[*]}")}/api/json"

# Retrieve the relative path to either "uart0_full.txt" (FVP) or
# "job_output.log"/"lava-uart0.log" (LAVA) for each child job. Once again,
# values where no match is found are intentionally kept as "-" so the array
# can be correlated with ${tftf_suite}.
# The sed expression splits the concatenated JSON documents onto separate
# lines before jq slurps them.
readarray -t child_output_results < <(curl -fsSL --fail-early "$child_output_results_url" \
    | sed 's/}{/}\n{/g' \
    | jq -sr '[ .[]
        | ([ .artifacts[]?
            | select(.fileName == "uart0_full.txt"
                or .fileName == "job_output.log"
                or .fileName == "lava-uart0.log") ]
        | if length > 0
        then .[0].relativePath else "-" end), .result ]
    | .[]')
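
# child_output_results now alternates (path, result) pairs, e.g.
# (hypothetical):
#   child_output_results=(fvp/uart0_full.txt SUCCESS - FAILURE)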

# Combine each job and child build number with the relative path to its output
# file. Entries whose path is "-" are kept so that ${tftf_urls} below stays
# aligned with ${tftf_suite}.
testlog_urls="${jenkins}/job/{"
tftf_child_results=()

for i in $(seq 0 2 $((${#child_output_results[@]}-1))) ; do
    testlog_urls+="${tftf_keys[$((i/2))]}/artifact/${child_output_results[$i]},"
    tftf_child_results+=("${child_output_results[$((i+1))]}")
done

# Remove the final comma and append a closing brace.
testlog_urls="${testlog_urls%,}}"

# Retrieve the log for each child with --include so the HTTP header is
# fetched too, then grep for a block like:
#   Tests Skipped : 125
#   Tests Passed  : 45
#   Tests Failed  : 0
#   Tests Crashed : 0
#
# If no such block is found, the HTTP header line is used to delimit each
# entry.
#
# In logs from LAVA, each message is wrapped in braces and carries some
# preamble, which is stripped with sed below.

tftf_result_keys=(
    "TestGroup" "TestSuite" "URL" "Result" "Passed" "Failed" "Crashed" "Skipped"
)

# Pre-populate the associative array with an empty value for every key.
declare -A results_split=()
for key_ in "${tftf_result_keys[@]}" ; do results_split[$key_]="" ; done

declare output_csv_str=""

# Let the shell's brace expansion turn the curl-style glob into a word list.
read -ra tftf_urls <<< "$(eval "echo ${testlog_urls}")"
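
# The eval relies on bash brace expansion, e.g. (hypothetical):
#   echo x/job/{a/1/artifact/u.txt,b/2/artifact/v.log}
# prints:
#   x/job/a/1/artifact/u.txt x/job/b/2/artifact/v.log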

# FIXME: adjust this so we can handle both LAVA logs.
# For each test suite:
#   curl the result log if it's not '-'
#   remove debug information
#   if no results are found:
#       use "Result"
#   else:
#       read each key
#   write the row to the csv

# Sort results into rows:
for i in "${!tftf_suite[@]}"; do
    results_split["TestGroup"]="${tftf_group[$i]:-}"
    results_split["TestSuite"]="\"${tftf_suite[$i]:-}\""
    results_split["URL"]="${tftf_urls[$i]:-}"
    results_split["Result"]="${tftf_child_results[$i]:-}"

    # Skipped/Crashed are always zero if no test block is found. Passed/Failed
    # default to the overall build result until a test block says otherwise.
    results_split["Skipped"]="0"
    results_split["Crashed"]="0"
    if [[ "${results_split["Result"]}" == "SUCCESS" ]]
    then
        results_split["Passed"]="1"
        results_split["Failed"]="0"
    else
        results_split["Passed"]="0"
        results_split["Failed"]="1"
    fi

    # Strip the LAVA preamble and keep only the test counters and HTTP header.
    readarray -t raw_result < <(curl -sL --include "${results_split["URL"]}" \
        | sed 's/.*msg": "//g' \
        | grep --text -E "^Tests|HTTP\/")

    for line in "${raw_result[@]}"; do
        if [[ "${line}" == Test* ]]
        then
            k=$(awk '{print $2}' <<< "${line}")
            count="${line//[!0-9]/}"
            results_split[$k]=$count
        fi
    done
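
    # e.g. a line such as "Tests Passed : 45" yields k="Passed" and
    # count="45", overwriting the default set from the overall build result.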

    # Generate the CSV row in key order so the values align with the headers.
    readarray -t row < <(for k in "${tftf_result_keys[@]}" ; do echo "${results_split[$k]}"; done)
    output_csv_str="${output_csv_str} $(IFS=,; echo "${row[*]}")"
    unset row
done

# Join the header fields with commas and print them to the output file.
(IFS=,; echo "${tftf_result_keys[*]}") > "${CSVFILE}"

# Sort the rows alphabetically and append them to the csv for gnuplot. The
# rows are space-separated in ${output_csv_str}, so split them onto one line
# each before sorting.
readarray -t sorted < <(printf '%s\n' ${output_csv_str} | sort)
printf "%s\n" "${sorted[@]}" >> "${CSVFILE}"
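
# The resulting CSV looks something like (hypothetical values):
#   TestGroup,TestSuite,URL,Result,Passed,Failed,Crashed,Skipped
#   fvp,"suite-a",https://jenkins/job/a/1/artifact/uart0_full.txt,SUCCESS,45,0,0,125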

# Produce a PNG image of the graph using gnuplot and the sibling .plot
# description file (this script's path with "bash" replaced by "plot").
gnuplot -e "jenkins_id='$sub_build_url'" -c "${0%bash}plot" \
    "$CSVFILE" > "$PNGFILE"