#!/usr/bin/env python3

""" report_parser.py:

Report parser parses openci json reports and conveys the information in
one or more standard formats (to be implemented).

After all information is captured it validates the success/failure status
and can change the script exit code for integration with standard CI
executors.
"""
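
# Illustrative CLI sketch (file names are hypothetical). The script can
# evaluate a collated JSON report and set the exit code for CI executors:
#
#   ./report_parser.py -r build_report.json -s "status = Success" -a -e
#
# or collect checkpatch output into a standard report:
#
#   ./report_parser.py -c -z checkpatch.log -f checkpatch_report.json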

from __future__ import print_function

__copyright__ = """
/*
 * Copyright (c) 2018-2020, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
"""

__author__ = "tf-m@lists.trustedfirmware.org"
__project__ = "Trusted Firmware-M Open CI"
__version__ = "1.4.0"


import os
import re
import sys
import json
import argparse
import xmltodict
from pprint import pprint

try:
    from tfm_ci_pylib.utils import load_json, get_local_git_info, \
        save_json, list_subdirs, get_remote_git_info, \
        convert_git_ref_path
except ImportError:
    dir_path = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.join(dir_path, "../"))

    from tfm_ci_pylib.utils import load_json, get_local_git_info, \
        save_json, list_subdirs, get_remote_git_info, \
        convert_git_ref_path


def xml_read(file):
    """ Read the contents of an xml file and convert it to a python object """

    data = None
    try:
        with open(file, "r") as F:
            data = xmltodict.parse(F.read())
    except Exception as E:
        print("Error", E)
    return data
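
# Illustrative sketch (file name is hypothetical): xmltodict maps tags to
# dict keys and prefixes attributes with "@", so a cppcheck results file
# can be queried like:
#
#   data = xml_read("cppcheck_report.xml")
#   version = data["results"]["cppcheck"]["@version"]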


def split_keys(joint_arg, sep="="):
    """ Split two keys spread by a separator, and return them as a list
    with whitespace removed """

    keys = joint_arg.split(sep)

    # Remove whitespace
    keys = map(str.strip, list(keys))
    # If a key contains the word True/False convert it to a boolean
    keys = list(map(lambda x:
                    (x.lower() == "true") if x.lower() in ["true", "false"]
                    else x,
                    keys))
    return keys
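
# A minimal usage sketch: the default success condition "status = Success"
# splits into a key/value pair, and boolean words are converted:
#
#   split_keys("status = Success")   # -> ["status", "Success"]
#   split_keys("success = True")     # -> ["success", True]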


def dependencies_mdt_collect(path_list,
                             out_f=None,
                             known_content_types=["mbedcrypto",
                                                  "cmsis",
                                                  "checkpatch",
                                                  "fpga",
                                                  "fastmodel"],
                             expected_paths=["mbedcrypto",
                                             "cmsis",
                                             "checkpatch"]):
    """ Collect dependencies checkout metadata. It creates a json report
    which can be optionally exported to a file """

    cpaths = {k: v for k, v in [n.split("=") for n in path_list]}
    cwd = os.path.abspath(os.getcwd())

    # Test that all the required paths are present
    intsec_set = set(expected_paths).intersection(set(cpaths.keys()))
    if len(intsec_set) != len(set(expected_paths)):
        _missing = set(expected_paths).difference(intsec_set)
        err_msg = "Error: missing core paths.\nMissing: %s\nPresent: %s" % (
            ",".join(_missing), ",".join(cpaths.keys())
        )
        print(err_msg)
        raise Exception(err_msg)

    # Create a dataset for the entries of known data format
    known_data = {n: {} for n in
                  set(known_content_types).intersection(set(cpaths.keys()))}

    # Create a dataset for unexpected data entries of unknown format
    extra_data = {n: {}
                  for n in set(cpaths.keys()).difference(set(known_data))}

    for d in list_subdirs(cpaths["mbedcrypto"]):
        print("mbed-crypto dir: ", d)
        # If the checkout directory name contains a git reference, convert
        # it to short format
        d = convert_git_ref_path(d)

        git_info = get_local_git_info(d)
        tag = os.path.split(git_info["dir"])[-1].split("-")[-1]

        # Absolute paths will not work in jenkins since it will change the
        # workspace directory between stages; convert to relative path
        git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
        known_data["mbedcrypto"][tag] = git_info

    for d in list_subdirs(cpaths["cmsis"]):
        print("CMSIS subdir: ", d)
        d = convert_git_ref_path(d)
        git_info = get_local_git_info(d)
        tag = os.path.split(git_info["dir"])[-1]

        # Absolute paths will not work in jenkins since it will change the
        # workspace directory between stages; convert to relative path
        git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
        known_data["cmsis"][tag] = git_info

    for d in list_subdirs(cpaths["checkpatch"]):
        print("Checkpatch subdir:", d)

        with open(os.path.join(d, "version.info"), "r") as F:
            url = F.readline().strip()

        git_info = get_remote_git_info(url)
        d = convert_git_ref_path(d)
        git_info['dir'] = d
        tag = os.path.split(git_info["dir"])[-1].split("_")[-1]

        # Absolute paths will not work in jenkins since it will change the
        # workspace directory between stages; convert to relative path
        git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
        known_data["checkpatch"][tag] = git_info

    if "fastmodel" in cpaths:
        for d in list_subdirs(cpaths["fastmodel"]):
            print("Fastmodel subdir:", d)
            json_info = load_json(os.path.join(d, "version.info"))
            # Absolute paths will not work in jenkins since it will change
            # the workspace directory between stages; convert to rel-path
            json_info["dir"] = os.path.relpath(d, cwd)

            tag = json_info["version"]
            known_data["fastmodel"][tag] = json_info

    if "fpga" in cpaths:
        for d in os.listdir(cpaths["fpga"]):
            print("FPGA imagefile:", d)
            if ".tar.gz" in d:
                name = d.split(".tar.gz")[0]
                platform, subsys, ver = name.split("_")
                known_data["fpga"][name] = {"platform": platform,
                                            "subsys": subsys,
                                            "version": ver,
                                            "recovery": os.path.join(
                                                cpaths["fpga"],
                                                d)}

    # Attempt to detect what the unexpected paths contain
    for e_path in extra_data.keys():
        for d in list_subdirs(cpaths[e_path]):
            print("%s subdir: %s" % (e_path, d))
            # If it contains a version.info
            if os.path.isfile(os.path.join(d, "version.info")):
                json_info = load_json(os.path.join(d, "version.info"))
                # Absolute paths will not work in jenkins since it will
                # change the workspace directory between stages; convert
                # to rel-path
                json_info["dir"] = os.path.relpath(d, cwd)

                tag = json_info["version"]
                extra_data[e_path][tag] = json_info
            # If it contains git information
            elif os.path.exists(os.path.join(d, ".git")):
                d = convert_git_ref_path(d)

                git_info = get_local_git_info(d)
                tag = os.path.split(git_info["dir"])[-1].split("-")[-1]

                # Absolute paths will not work in jenkins since it will
                # change the workspace directory between stages; convert
                # to rel-path
                git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
                extra_data[e_path][tag] = git_info
            # Do not break flow if detection fails
            else:
                print("Error determining contents of directory: %s/%s for "
                      "indexing purposes" % (e_path, d))
                # Fall back to the directory name as the tag, since no
                # version information could be extracted
                tag = os.path.split(d)[-1]
                extra_data[e_path][tag] = {"info": "N.A"}

    # Add the extra paths to the expected ones
    for k, v in extra_data.items():
        known_data[k] = v
    if out_f:
        print("Exporting metadata to", out_f)
        save_json(out_f, known_data)
    else:
        pprint(known_data)

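
# Illustrative input sketch (paths are hypothetical): each entry of
# path_list is a "name=path" pair, and the three expected_paths entries
# must be present:
#
#   dependencies_mdt_collect(["mbedcrypto=build/mbedcrypto",
#                             "cmsis=build/cmsis",
#                             "checkpatch=build/checkpatch"],
#                            out_f="dependencies_report.json")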

def cppcheck_mdt_collect(file_list, out_f=None):
    """ XML parse multiple cppcheck output files and create a json report """

    xml_files = list(map(os.path.abspath, file_list))

    dict_data = []
    version = None
    for xf in xml_files:
        data = xml_read(xf)

        version = data["results"]["cppcheck"]["@version"]
        # If nothing is found the errors dictionary will be a NoneType object
        if data["results"]["errors"] is not None:
            # Use json to flatten ordered dict
            str_data = json.dumps(data["results"]["errors"]["error"])
            # Remove the @ prefix that cppcheck adds to attribute fields
            str_data = str_data.replace("@", '')

            # Convert to dict again (xml to json will have added an array)
            _dt = json.loads(str_data)

            if isinstance(_dt, list):
                dict_data += _dt
            # If only one error is found it will be given as a single item
            elif isinstance(_dt, dict):
                dict_data += [_dt]
            else:
                print("Ignoring cpp entry %s of type %s" % (_dt, type(_dt)))

    out_data = {"_metadata_": {"cppcheck-version": version},
                "report": {}}

    for E in dict_data:

        sever = E.pop("severity")

        # Group the entries by severity
        try:
            out_data["report"][sever].append(E)
        except KeyError:
            out_data["report"][sever] = [E]

    _errors = 0
    for msg_sever, msg_sever_entries in out_data["report"].items():
        out_data["_metadata_"][msg_sever] = str(len(msg_sever_entries))
        if msg_sever == "error":
            _errors = len(msg_sever_entries)

    out_data["_metadata_"]["success"] = True if not int(_errors) else False

    if out_f:
        save_json(out_f, out_data)
    else:
        pprint(out_data)

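
# Illustrative output sketch (counts are hypothetical): entries are grouped
# by severity and summarized in _metadata_, with "success" driven by the
# error count:
#
#   {"_metadata_": {"cppcheck-version": "1.90",
#                   "error": "2",
#                   "style": "5",
#                   "success": False},
#    "report": {"error": [...], "style": [...]}}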

def checkpatch_mdt_collect(file_name, out_f=None):
    """ Regex parse a checkpatch output file and create a report """

    out_data = {"_metadata_": {"errors": 0,
                               "warnings": 0,
                               "lines": 0,
                               "success": True},
                "report": {}
                }
    with open(file_name, "r") as F:
        cpatch_data = F.read().strip()

    # checkpatch will not report anything when no issues are found
    if len(cpatch_data):
        stat_rex = re.compile(r'^total: (\d+) errors, '
                              r'(\d+) warnings, (\d+) lines',
                              re.MULTILINE)
        line_rex = re.compile(r'([\S]+:)\s([\S]+:)\s([\S ]+)\n', re.MULTILINE)
        ewl = stat_rex.search(cpatch_data)
        try:
            _errors, _warnings, _lines = ewl.groups()
        except Exception as E:
            print("Exception parsing checkpatch file.", E)
            # If there is text but not in a known format, return -1 and
            # fail the job
            _errors = _warnings = _lines = "-1"

        out_data["_metadata_"] = {"errors": _errors,
                                  "warnings": _warnings,
                                  "lines": _lines,
                                  "success": True if not int(_errors)
                                  else False}

        checkpatch_entries = line_rex.findall(cpatch_data)

        for en in checkpatch_entries:
            _file, _line, _ = en[0].split(":")
            try:
                _type, _subtype, _ = en[1].split(":")
            except Exception as e:
                print("WARNING: Ignoring malformed checkpatch line: %s" %
                      "".join(en))
                continue
            _msg = en[2]

            E = {"id": _subtype,
                 "verbose": _subtype,
                 "msg": _msg,
                 "location": {"file": _file, "line": _line}
                 }
            try:
                out_data["report"][_type.lower()].append(E)
            except KeyError:
                out_data["report"][_type.lower()] = [E]

    if out_f:
        save_json(out_f, out_data)
    else:
        pprint(out_data)

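
# Illustrative input sketch (path and message are hypothetical): the line
# regex expects checkpatch entries of the form
#
#   drivers/foo.c:42: ERROR:SPACING: space required after ','
#
# which is captured as {"id": "SPACING", "msg": "space required after ','",
# "location": {"file": "drivers/foo.c", "line": "42"}} under the "error"
# report key.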

def jenkins_mdt_collect(out_f):
    """ Collect Jenkins environment information and store it in a
    key-value list """

    # Jenkins environment parameters are always valid
    jenkins_env_keys = ["BUILD_ID",
                        "BUILD_URL",
                        "JOB_BASE_NAME",
                        "GERRIT_URL",
                        "GERRIT_PROJECT"]
    # The following Gerrit parameters only exist when
    # a job is triggered by a web hook
    gerrit_trigger_keys = ["GERRIT_CHANGE_NUMBER",
                           "GERRIT_CHANGE_SUBJECT",
                           "GERRIT_CHANGE_ID",
                           "GERRIT_PATCHSET_REVISION",
                           "GERRIT_PATCHSET_NUMBER",
                           "GERRIT_REFSPEC",
                           "GERRIT_CHANGE_URL",
                           "GERRIT_BRANCH",
                           "GERRIT_CHANGE_OWNER_EMAIL",
                           "GERRIT_PATCHSET_UPLOADER_EMAIL"]

    # Find as many of the variables as possible in the environment
    el = set(os.environ).intersection(set(jenkins_env_keys +
                                          gerrit_trigger_keys))
    # Format it in key:value pairs
    out_data = {n: os.environ[n] for n in el}
    if out_f:
        save_json(out_f, out_data)
    else:
        pprint(out_data)

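
# Illustrative output sketch (values are hypothetical): only the variables
# present in the environment end up in the report, e.g.
#
#   {"BUILD_ID": "1234",
#    "JOB_BASE_NAME": "tf-m-build",
#    "GERRIT_BRANCH": "master"}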

def metadata_collect(user_args):
    """ Logic for information collection during different stages of
    the build """

    if user_args.dependencies_checkout and user_args.content_paths:
        dependencies_mdt_collect(user_args.content_paths,
                                 user_args.out_f)
    elif user_args.git_info:
        git_info = get_local_git_info(os.path.abspath(user_args.git_info))

        if user_args.out_f:
            save_json(user_args.out_f, git_info)
        else:
            pprint(git_info)
    elif user_args.cppcheck_files:
        cppcheck_mdt_collect(user_args.cppcheck_files, user_args.out_f)
    elif user_args.checkpatch_file:
        checkpatch_mdt_collect(user_args.checkpatch_file, user_args.out_f)
    elif user_args.jenkins_info:
        jenkins_mdt_collect(user_args.out_f)
    else:
        print("Invalid Metadata collection arguments")
        print(user_args)
        sys.exit(1)


def collate_report(key_file_list, output_f=None, stdout=True):
    """ Join different types of json formatted reports into one """

    out_data = {"_metadata_": {}, "report": {}}
    for kf in key_file_list:
        try:
            key, fl = kf.split("=")
            data = load_json(fl)
            # If data is a standard report (_metadata_ + report), parse it
            if ("_metadata_" in data.keys() and "report" in data.keys()):
                out_data["_metadata_"][key] = data["_metadata_"]
                out_data["report"][key] = data["report"]
            # Else treat it as a raw information passing dataset
            else:
                try:
                    out_data["info"][key] = data
                except KeyError:
                    out_data["info"] = {key: data}
        except Exception as E:
            print("Exception parsing argument", kf, E)
            continue
    if output_f:
        save_json(output_f, out_data)
    elif stdout:
        pprint(out_data)
    return out_data

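
# A minimal usage sketch (report file names are hypothetical): each entry
# maps a report key to a json file produced by one of the collect modes:
#
#   collate_report(["cppcheck=cppcheck_report.json",
#                   "checkpatch=checkpatch_report.json"],
#                  output_f="joint_report.json")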

def filter_report(key_value_list, input_f, output_f):
    """ Generate a subset of the data contained in input_f, by selecting
    only the values defined in key_value_list """

    try:
        rep_data = load_json(input_f)
    except Exception as E:
        print("Exception parsing ", input_f, E)
        sys.exit(1)

    out_data = {}
    for kf in key_value_list:
        try:
            tag, value = kf.split("=")
            # if multiple selection
            if "," in value:
                out_data[tag] = {}
                for v in value.split(","):
                    data = rep_data[tag][v]
                    out_data[tag][v] = data
            else:
                data = rep_data[tag][value]
                out_data[tag] = {value: data}
        except Exception as E:
            print("Could not extract data-set for k: %s v: %s" % (tag, value))
            print(E)
            continue
    if output_f:
        save_json(output_f, out_data)
    else:
        pprint(out_data)

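
# A minimal usage sketch (keys and file names are hypothetical): select one
# or more comma separated entries under a report key:
#
#   filter_report(["report=cppcheck,checkpatch"],
#                 "joint_report.json",
#                 "filtered_report.json")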

def parse_report(user_args):
    """ Parse a report and attempt to determine if it is overall successful
    or not. It will set the script's exit code accordingly """

    # Parse Mode
    in_rep = load_json(user_args.report)
    report_eval = None

    # Extract the required condition for evaluation to pass
    pass_key, pass_val = split_keys(user_args.set_pass)

    print("Evaluation will succeed if \"%s\" is \"%s\"" % (pass_key,
                                                           pass_val))
    try:
        report_eval = in_rep["_metadata_"][pass_key] == pass_val
        print("Detected '%s' field in _metadata_." % pass_key)
    except Exception:
        pass

    if report_eval is None:
        if isinstance(in_rep, dict):
            # The report does not carry an overall success field in its
            # metadata, so evaluate the individual report items instead
            in_rep = in_rep["report"]
            ev_list = in_rep.values()
        elif isinstance(in_rep, list):
            ev_list = in_rep
        else:
            print("Invalid data type: %s" % type(in_rep))
            return

        if user_args.onepass:
            try:
                report_eval = in_rep[user_args.onepass][pass_key] == pass_val
            except Exception:
                report_eval = False

        # If every single field needs to be successful, invert the check
        # and look for the ones which are not
        elif user_args.allpass:
            try:
                if not list(filter(lambda x: x[pass_key] != pass_val,
                                   ev_list)):
                    report_eval = True
            except Exception as e:
                print(e)
                report_eval = False
        else:
            print("Evaluation condition not set. Please use -a or -o. "
                  "Launch help (-h) for more information")

    print("Evaluation %s" % ("passed" if report_eval else "failed"))
    if user_args.eif:
        print("Setting script exit status")
        sys.exit(0 if report_eval else 1)

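
# Illustrative report sketch (contents are hypothetical): with the default
# condition "status = Success" and -a, every entry under "report" must
# carry the matching field for the evaluation to pass:
#
#   {"report": {"build_a": {"status": "Success"},
#               "build_b": {"status": "Success"}}}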

def main(user_args):
    """ Main logic """

    # Metadata Collect Mode
    if user_args.collect:
        metadata_collect(user_args)
        return
    elif user_args.filter_report:
        filter_report(user_args.filter_report,
                      user_args.report,
                      user_args.out_f)
    elif user_args.collate_report:
        collate_report(user_args.collate_report, user_args.out_f)
    else:
        parse_report(user_args)


def get_cmd_args():
    """ Parse command line arguments """

    # Parse command line arguments to override config
    parser = argparse.ArgumentParser(description="TFM Report Parser.")
    parser.add_argument("-e", "--error_if_failed",
                        dest="eif",
                        action="store_true",
                        help="If set will change the script exit code")
    parser.add_argument("-s", "--set-success-field",
                        dest="set_pass",
                        default="status = Success",
                        action="store",
                        help="Set the key which the script will use to "
                             "assert success/failure")
    parser.add_argument("-a", "--all-fields-must-pass",
                        dest="allpass",
                        action="store_true",
                        help="When set and a list is provided, all entries "
                             "must be successful for evaluation to pass")
    parser.add_argument("-o", "--one-field-must-pass",
                        dest="onepass",
                        action="store",
                        help="Only the user defined field must pass")
    parser.add_argument("-r", "--report",
                        dest="report",
                        action="store",
                        help="JSON file containing input report")
    parser.add_argument("-c", "--collect",
                        dest="collect",
                        action="store_true",
                        help="When set, the parser will attempt to collect "
                             "information and produce a report")
    parser.add_argument("-d", "--dependencies-checkout",
                        dest="dependencies_checkout",
                        action="store_true",
                        help="Collect information from a dependencies "
                             "checkout job")
    parser.add_argument("-f", "--output-file",
                        dest="out_f",
                        action="store",
                        help="Output file to store captured information")
    parser.add_argument("-p", "--content-paths",
                        dest="content_paths",
                        nargs='*',
                        help=("Pass a space separated list of paths in the "
                              "following format: -p mbedtls=/yourpath/ "
                              "fpv=/another/path. Used in conjunction "
                              "with -d"))
    parser.add_argument("-g", "--git-info",
                        dest="git_info",
                        action="store",
                        help="Extract git information from given path. "
                             "Requires --collect directive. Optional "
                             "parameter --output-file")
    parser.add_argument("-x", "--cpp-check-xml",
                        dest="cppcheck_files",
                        nargs='*',
                        action="store",
                        help="Extract cppcheck static analysis information "
                             "from output files, provided as a space "
                             "separated list. Requires --collect directive. "
                             "Optional parameter --output-file")
    parser.add_argument("-z", "--checkpatch-parse-f",
                        dest="checkpatch_file",
                        action="store",
                        help="Extract checkpatch static analysis information "
                             "from an output file. Requires --collect "
                             "directive. Optional parameter --output-file")
    parser.add_argument("-j", "--jenkins-info",
                        dest="jenkins_info",
                        action="store_true",
                        help="Extract Jenkins and Gerrit trigger environment "
                             "information. Requires --collect directive. "
                             "Optional parameter --output-file")
    parser.add_argument("-l", "--collate-report",
                        dest="collate_report",
                        action="store",
                        nargs='*',
                        help="Pass a space separated list of key-value pairs "
                             "in the following format: -l "
                             "report_key_0=report_file_0 "
                             "report_key_1=report_file_1. Collate will "
                             "generate a joint dataset and print it to "
                             "stdout. Optional parameter --output-file")
    parser.add_argument("-t", "--filter-report",
                        dest="filter_report",
                        action="store",
                        nargs='*',
                        help="Requires --report parameter for input file. "
                             "Pass a space separated list of key-value pairs "
                             "in the following format: -t report_key_0=value_0 "
                             "report_key_1=value_1. Filter will remove all "
                             "entries of the original report except the ones "
                             "matching the key:value pairs defined, and print "
                             "it to stdout. Optional parameter --output-file")
    return parser.parse_args()


if __name__ == "__main__":
    main(get_cmd_args())