blob: 95af3f25bab1f35bd2c2d1e1a2fa92e21a210de3 [file] [log] [blame]
Minos Galanakisea421232019-06-20 17:11:28 +01001#!/usr/bin/env python3
2
3""" report_parser.py:
4
5 Report parser parses openci json reports and conveys the invormation in a
6 one or more standard formats (To be implememented)
7
8 After all information is captured it validates the success/failure status
9 and can change the script exit code for intergration with standard CI
10 executors.
11 """
12
13from __future__ import print_function
14
15__copyright__ = """
16/*
Minos Galanakisc8859352020-03-10 16:55:30 +000017 * Copyright (c) 2018-2020, Arm Limited. All rights reserved.
Minos Galanakisea421232019-06-20 17:11:28 +010018 *
19 * SPDX-License-Identifier: BSD-3-Clause
20 *
21 */
22 """
23__author__ = "Minos Galanakis"
24__email__ = "minos.galanakis@linaro.org"
25__project__ = "Trusted Firmware-M Open CI"
26__status__ = "stable"
27__version__ = "1.1"
28
29
30import os
31import re
32import sys
33import json
34import argparse
Dean Birchf6aa3da2020-01-24 12:29:38 +000035import xmltodict
Minos Galanakisea421232019-06-20 17:11:28 +010036from pprint import pprint
37
38try:
39 from tfm_ci_pylib.utils import load_json, get_local_git_info, \
40 save_json, list_subdirs, get_remote_git_info, \
41 convert_git_ref_path, xml_read
42except ImportError:
43 dir_path = os.path.dirname(os.path.realpath(__file__))
44 sys.path.append(os.path.join(dir_path, "../"))
45
46 from tfm_ci_pylib.utils import load_json, get_local_git_info, \
47 save_json, list_subdirs, get_remote_git_info, \
48 convert_git_ref_path, xml_read
49
50
Dean Birchf6aa3da2020-01-24 12:29:38 +000051def xml_read(file):
52 """" Read the contects of an xml file and convert it to python object """
53
54 data = None
55 try:
56 with open(file, "r") as F:
57 data = xmltodict.parse(F.read())
58 except Exception as E:
59 print("Error", E)
60 return data
61
62
Minos Galanakisea421232019-06-20 17:11:28 +010063def split_keys(joint_arg, sep="="):
64 """ Split two keys spread by a separator, and return them as a tuple
65 with whitespace removed """
66
67 keys = joint_arg.split(sep)
68
69 # Remove whitespace
70 keys = map(str.strip, list(keys))
71 # If key contains the word True/False convert it.
72 keys = list(map(lambda x:
73 eval(x.title()) if x.lower() in ["true", "false"] else x,
74 keys))
75 return keys
76
77
78def dependencies_mdt_collect(path_list,
79 out_f=None,
Minos Galanakisc8859352020-03-10 16:55:30 +000080 known_content_types=["mbedcrypto",
81 "cmsis",
82 "checkpatch",
83 "fpga",
84 "fastmodel"],
Tamas Ban681834a2019-12-02 11:05:03 +000085 expected_paths=["mbedcrypto",
Minos Galanakisea421232019-06-20 17:11:28 +010086 "cmsis",
87 "checkpatch"]):
88 """ Collect dependencies checkout metadata. It creates a json report which
89 can be optionally exported to a file """
90
91 cpaths = {k: v for k, v in [n.split("=") for n in path_list]}
92 cwd = os.path.abspath(os.getcwd())
93
Minos Galanakisc8859352020-03-10 16:55:30 +000094 # Test that all the required paths are present
95 intsec_set = set(expected_paths).intersection(set(cpaths.keys()))
96 if len(intsec_set) != len(set(expected_paths)):
97 _missing = set(expected_paths).difference(intsec_set)
98 err_msg = "Error missing core paths.\nRequired: %s\nPresent: %s" % (
99 ",".join(_missing), ",".join(cpaths.keys())
Minos Galanakisea421232019-06-20 17:11:28 +0100100 )
101 print(err_msg)
102 raise Exception(err_msg)
103
Minos Galanakisc8859352020-03-10 16:55:30 +0000104 # Create a dataset for the entires of known data format
105 known_data = {n: {} for n in
106 set(known_content_types).intersection(set(cpaths.keys()))}
107
108 # Create a dataset for unexpected data entries of unknown format
109 extra_data = {n: {}
110 for n in set(cpaths.keys()).difference(set(known_data))}
111
Minos Galanakisea421232019-06-20 17:11:28 +0100112 for d in list_subdirs(cpaths["mbedcrypto"]):
113 print("mbed-crypto dir: ", d)
114 # if checkout directory name contains a git reference convert to short
115 d = convert_git_ref_path(d)
116
117 git_info = get_local_git_info(d)
118 tag = os.path.split(git_info["dir"])[-1].split("-")[-1]
119
120 # Absolute paths will not work in jenkins since it will change the
121 # workspaace directory between stages convert to relative path
122 git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
Minos Galanakisc8859352020-03-10 16:55:30 +0000123 known_data["mbedcrypto"][tag] = git_info
Minos Galanakisea421232019-06-20 17:11:28 +0100124
125 for d in list_subdirs(cpaths["cmsis"]):
126 print("CMS subdir: ", d)
127 d = convert_git_ref_path(d)
128 git_info = get_local_git_info(d)
129 tag = os.path.split(git_info["dir"])[-1]
130
131 # Absolute paths will not work in jenkins since it will change the
132 # workspaace directory between stages convert to relative path
133 git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
Minos Galanakisc8859352020-03-10 16:55:30 +0000134 known_data["cmsis"][tag] = git_info
Minos Galanakisea421232019-06-20 17:11:28 +0100135
136 for d in list_subdirs(cpaths["checkpatch"]):
137 print("Checkpatch subdir:", d)
138
139 with open(os.path.join(d, "version.info"), "r") as F:
140 url = F.readline().strip()
141
142 git_info = get_remote_git_info(url)
143 d = convert_git_ref_path(d)
144 git_info['dir'] = d
145 tag = os.path.split(git_info["dir"])[-1].split("_")[-1]
146
147 # Absolute paths will not work in jenkins since it will change the
148 # workspaace directory between stages convert to relative path
149 git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
Minos Galanakisc8859352020-03-10 16:55:30 +0000150 known_data["checkpatch"][tag] = git_info
151
152 if "fastmodel" in cpaths:
153 for d in list_subdirs(cpaths["fastmodel"]):
154 print("Fastmodel subdir:", d)
155 json_info = load_json(os.path.join(d, "version.info"))
156 json_info["dir"] = os.path.relpath(d, cwd)
157
158 tag = json_info["version"]
159 # Absolute paths will not work in jenkins since it will change the
160 # workspaace directory between stages convert to relative path
161 known_data["fastmodel"][tag] = json_info
162
Minos Galanakisea421232019-06-20 17:11:28 +0100163 if "fpga" in cpaths:
164 for d in os.listdir(cpaths["fpga"]):
165 print("FPGA imagefile:", d)
166 if ".tar.gz" in d:
167 name = d.split(".tar.gz")[0]
168 platform, subsys, ver = name.split("_")
Minos Galanakisc8859352020-03-10 16:55:30 +0000169 known_data["fpga"][name] = {"platform": platform,
170 "subsys": subsys,
171 "version": ver,
172 "recovery": os.path.join(
173 cpaths["fpga"],
174 d)}
Minos Galanakis27046222019-11-06 15:58:48 +0000175
176 #Attempt to detect what the unexpected paths contain
177 for e_path in extra_data.keys():
178 for d in list_subdirs(cpaths[e_path]):
179 print("%s subdir: %s" % (e_path, d))
180 # If it contains a version.info
181 if os.path.isfile(os.path.join(d, "version.info")):
182 json_info = load_json(os.path.join(d, "version.info"))
183 json_info["dir"] = os.path.relpath(d, cwd)
184
185 tag = json_info["version"]
186 # Absolute paths will not work in jenkins since it will change
187 # the workspaace directory between stages convert to rel-path
188 extra_data[e_path][tag] = json_info
189 # If it contains git information
190 elif os.path.exists(os.path.join(d, ".git")):
191 d = convert_git_ref_path(d)
192
193 git_info = get_local_git_info(d)
194 tag = os.path.split(git_info["dir"])[-1].split("-")[-1]
195
196 # Absolute paths will not work in jenkins since it will change
197 # the workspaace directory between stages convert to rel-path
198 git_info["dir"] = os.path.relpath(git_info["dir"], cwd)
199 extra_data[e_path][tag] = git_info
200 # Do not break flow if detection fails
201 else:
202 print("Error determining contents of directory: %s/%s for "
203 "indexing purposes" % (e_path, d))
204 extra_data[e_path][tag] = {"info": "N.A"}
205
206 # Add the extra paths to the expected ones
207 for k, v in extra_data.items():
Minos Galanakisc8859352020-03-10 16:55:30 +0000208 known_data[k] = v
Minos Galanakisea421232019-06-20 17:11:28 +0100209 if out_f:
210 print("Exporting metadata to", out_f)
Minos Galanakisc8859352020-03-10 16:55:30 +0000211 save_json(out_f, known_data)
Minos Galanakisea421232019-06-20 17:11:28 +0100212 else:
Minos Galanakisc8859352020-03-10 16:55:30 +0000213 pprint(known_data)
Minos Galanakisea421232019-06-20 17:11:28 +0100214
215
216def cppcheck_mdt_collect(file_list, out_f=None):
217 """ XML parse multiple cppcheck output files and create a json report """
218
219 xml_files = list(map(os.path.abspath, file_list))
220
221 dict_data = []
222 version = None
223 for xf in xml_files:
224 data = xml_read(xf)
225
226 version = data["results"]["cppcheck"]["@version"]
227 # If nothing is found the errors dictionary will be a Nonetype object
228 if data["results"]["errors"] is not None:
229 # Use json to flatten ordered dict
230 str_data = json.dumps(data["results"]["errors"]["error"])
231 # Remove @ prefix on first char of files that cppcheck adds
232 str_data = str_data.replace("@", '')
233
234 # Convert to dict again(xml to json will have added an array)
235 _dt = json.loads(str_data)
236
237 if isinstance(_dt, list):
238 dict_data += _dt
239 # If only one error is foud it will give it as a single item
240 elif isinstance(_dt, dict):
241 dict_data += [_dt]
242 else:
243 print("Ignoring cpp entry %s of type %s" % (_dt, type(_dt)))
244
245 out_data = {"_metadata_": {"cppcheck-version": version},
246 "report": {}}
247
248 for E in dict_data:
249
250 sever = E.pop("severity")
251
252 # Sort it based on serverity
253 try:
254 out_data["report"][sever].append(E)
255 except KeyError:
256 out_data["report"][sever] = [E]
257
258 _errors = 0
259 for msg_sever, msg_sever_entries in out_data["report"].items():
260 out_data["_metadata_"][msg_sever] = str(len(msg_sever_entries))
261 if msg_sever == "error":
262 _errors = len(msg_sever_entries)
263
264 out_data["_metadata_"]["success"] = True if not int(_errors) else False
265
266 if out_f:
267 save_json(out_f, out_data)
268 else:
269 pprint(out_data)
270
271
272def checkpatch_mdt_collect(file_name, out_f=None):
273 """ Regex parse a checpatch output file and create a report """
274
275 out_data = {"_metadata_": {"errors": 0,
276 "warnings": 0,
277 "lines": 0,
278 "success": True},
279 "report": {}
280 }
281 with open(file_name, "r") as F:
282 cpatch_data = F.read().strip()
283
284 # checkpatch will not report anything when no issues are found
285 if len(cpatch_data):
286 stat_rex = re.compile(r'^total: (\d+) errors, '
287 r'(\d+) warnings, (\d+) lines',
288 re.MULTILINE)
289 line_rex = re.compile(r'([\S]+:)\s([\S]+:)\s([\S ]+)\n', re.MULTILINE)
290 ewl = stat_rex.search(cpatch_data)
291 try:
292 _errors, _warnings, _lines = ewl.groups()
293 except Exception as E:
294 print("Exception parsing checkpatch file.", E)
295 # If there is text but not in know format return -1 and fail job
296 _errors = _warnings = _lines = "-1"
297 checkpath_entries = line_rex.findall(cpatch_data)
298
299 for en in checkpath_entries:
300 _file, _line, _ = en[0].split(":")
Galanakis, Minosc3e8c742019-12-02 16:18:50 +0000301 try:
302 _type, _subtype, _ = en[1].split(":")
303 except Exception as e:
304 print("WARNING: Ignoring Malformed checkpatch line: %s" %
305 "".join(en))
306 continue
Minos Galanakisea421232019-06-20 17:11:28 +0100307 _msg = en[2]
308
309 out_data["_metadata_"] = {"errors": _errors,
310 "warnings": _warnings,
311 "lines": _lines,
312 "success": True if not int(_errors)
313 else False}
314
315 E = {"id": _subtype,
316 "verbose": _subtype,
317 "msg": _msg,
318 "location": {"file": _file, "line": _line}
319 }
320 try:
321 out_data["report"][_type.lower()].append(E)
322 except KeyError:
323 out_data["report"][_type.lower()] = [E]
324
325 if out_f:
326 save_json(out_f, out_data)
327 else:
328 pprint(out_data)
329
330
331def jenkins_mdt_collect(out_f):
332 """ Collects Jenkins enviroment information and stores
333 it in a key value list """
334
335 # Jenkins environment parameters are always valid
336 jenkins_env_keys = ["BUILD_ID",
337 "BUILD_URL",
338 "JOB_BASE_NAME",
339 "GERRIT_URL",
340 "GERRIT_PROJECT"]
341 # The following Gerrit parameters only exist when
342 # a job is triggered by a web hook
343 gerrit_trigger_keys = ["GERRIT_CHANGE_NUMBER",
344 "GERRIT_CHANGE_SUBJECT",
345 "GERRIT_CHANGE_ID",
346 "GERRIT_PATCHSET_REVISION",
347 "GERRIT_PATCHSET_NUMBER",
348 "GERRIT_REFSPEC",
349 "GERRIT_CHANGE_URL",
350 "GERRIT_BRANCH",
351 "GERRIT_CHANGE_OWNER_EMAIL",
352 "GERRIT_PATCHSET_UPLOADER_EMAIL"]
353
354 # Find as mamny of the variables in environent
355 el = set(os.environ).intersection(set(jenkins_env_keys +
356 gerrit_trigger_keys))
357 # Format it in key:value pairs
358 out_data = {n: os.environ[n] for n in el}
359 if out_f:
360 save_json(out_f, out_data)
361 else:
362 pprint(out_data)
363
364
365def metadata_collect(user_args):
366 """ Logic for information collection during different stages of
367 the build """
368
369 if user_args.dependencies_checkout and user_args.content_paths:
370 dependencies_mdt_collect(user_args.content_paths,
371 user_args.out_f)
372 elif user_args.git_info:
373 git_info = get_local_git_info(os.path.abspath(user_args.git_info))
374
375 if user_args.out_f:
376 save_json(user_args.out_f, git_info)
377 else:
378 pprint(git_info)
379 elif user_args.cppcheck_files:
380 cppcheck_mdt_collect(user_args.cppcheck_files, user_args.out_f)
381 elif user_args.checkpatch_file:
382 checkpatch_mdt_collect(user_args.checkpatch_file, user_args.out_f)
383 elif user_args.jenkins_info:
384 jenkins_mdt_collect(user_args.out_f)
385 else:
386 print("Invalid Metadata collection arguments")
387 print(user_args)
388 sys.exit(1)
389
390
391def collate_report(key_file_list, ouput_f=None, stdout=True):
392 """ Join different types of json formatted reports into one """
393
394 out_data = {"_metadata_": {}, "report": {}}
395 for kf in key_file_list:
396 try:
397 key, fl = kf.split("=")
398 data = load_json(fl)
399 # If data is a standard reprort (metdata-report parse it)
400 if ("_metadata_" in data.keys() and "report" in data.keys()):
401 out_data["_metadata_"][key] = data["_metadata_"]
402 out_data["report"][key] = data["report"]
403 # Else treat it as a raw information passing dataset
404 else:
405 try:
406 out_data["info"][key] = data
407 except KeyError as E:
408 out_data["info"] = {key: data}
409 except Exception as E:
410 print("Exception parsing argument", kf, E)
411 continue
412 if ouput_f:
413 save_json(ouput_f, out_data)
414 elif stdout:
415 pprint(out_data)
416 return out_data
417
418
419def filter_report(key_value_list, input_f, ouput_f):
420 """ Generates a subset of the data contained in
421 input_f, by selecting only the values defined in key_value list """
422
423 try:
424 rep_data = load_json(input_f)
425 except Exception as E:
426 print("Exception parsing ", input_f, E)
427 sys.exit(1)
428
429 out_data = {}
430 for kf in key_value_list:
431 try:
432 tag, value = kf.split("=")
433 # if multiple selection
434 if(",") in value:
435 out_data[tag] = {}
436 for v in value.split(","):
437 data = rep_data[tag][v]
438 out_data[tag][v] = data
439 else:
440 data = rep_data[tag][value]
441 out_data[tag] = {value: data}
442 except Exception as E:
443 print("Could not extract data-set for k: %s v: %s" % (tag, value))
444 print(E)
445 continue
446 if ouput_f:
447 save_json(ouput_f, out_data)
448 else:
449 pprint(out_data)
450
451
452def parse_report(user_args):
453 """ Parse a report and attempt to determine if it is overall successful or
454 not. It will set the script's exit code accordingly """
455
456 # Parse Mode
457 in_rep = load_json(user_args.report)
458 report_eval = None
459
460 # Extract the required condition for evalutation to pass
461 pass_key, pass_val = split_keys(user_args.set_pass)
462
463 print("Evaluation will succeed if \"%s\" is \"%s\"" % (pass_key,
464 pass_val))
465 try:
466 report_eval = in_rep["_metadata_"][pass_key] == pass_val
467 print("Evaluating detected '%s' field in _metaddata_. " % pass_key)
468 except Exception as E:
469 pass
470
471 if report_eval is None:
472 if isinstance(in_rep, dict):
473 # If report contains an overall success field in metadata do not
474 # parse the items
475 in_rep = in_rep["report"]
476 ev_list = in_rep.values()
477 elif isinstance(in_rep, list):
478 ev_list = in_rep
479 else:
480 print("Invalid data type: %s" % type(in_rep))
481 return
482
483 if user_args.onepass:
484 try:
485 report_eval = in_rep[user_args.onepass][pass_key] == pass_val
486 except Exception as e:
487 report_eval = False
488
489 # If every singel field need to be succesfful, invert the check and
490 # look for those who are not
491 elif user_args.allpass:
492 try:
493 if list(filter(lambda x: x[pass_key] != pass_val, ev_list)):
494 pass
495 else:
496 report_eval = True
497 except Exception as e:
498 print(e)
499 report_eval = False
500 else:
501 print("Evaluation condition not set. Please use -a or -o. Launch"
502 "help (-h) for more information")
503
504 print("Evaluation %s" % ("passed" if report_eval else "failed"))
505 if user_args.eif:
506 print("Setting script exit status")
507 sys.exit(0 if report_eval else 1)
508
509
510def main(user_args):
511 """ Main logic """
512
513 # Metadat Collect Mode
514 if user_args.collect:
515 metadata_collect(user_args)
516 return
517 elif user_args.filter_report:
518 filter_report(user_args.filter_report,
519 user_args.report,
520 user_args.out_f)
521 elif user_args.collate_report:
522 collate_report(user_args.collate_report, user_args.out_f)
523 else:
524 parse_report(user_args)
525
526
527def get_cmd_args():
528 """ Parse command line arguments """
529
530 # Parse command line arguments to override config
531 parser = argparse.ArgumentParser(description="TFM Report Parser.")
532 parser.add_argument("-e", "--error_if_failed",
533 dest="eif",
534 action="store_true",
535 help="If set will change the script exit code")
536 parser.add_argument("-s", "--set-success-field",
537 dest="set_pass",
538 default="status = Success",
539 action="store",
540 help="Set the key which the script will use to"
541 "assert success/failure")
542 parser.add_argument("-a", "--all-fields-must-pass",
543 dest="allpass",
544 action="store_true",
545 help="When set and a list is provided, all entries"
546 "must be succefull for evaluation to pass")
547 parser.add_argument("-o", "--one-field-must-pass",
548 dest="onepass",
549 action="store",
550 help="Only the user defined field must pass")
551 parser.add_argument("-r", "--report",
552 dest="report",
553 action="store",
554 help="JSON file containing input report")
555 parser.add_argument("-c", "--collect",
556 dest="collect",
557 action="store_true",
558 help="When set, the parser will attempt to collect"
559 "information and produce a report")
560 parser.add_argument("-d", "--dependencies-checkout",
561 dest="dependencies_checkout",
562 action="store_true",
563 help="Collect information from a dependencies "
564 "checkout job")
565 parser.add_argument("-f", "--output-file",
566 dest="out_f",
567 action="store",
568 help="Output file to store captured information")
569 parser.add_argument('-p', '--content-paths',
570 dest="content_paths",
571 nargs='*',
572 help=("Pass a space separated list of paths in the"
573 "following format: -p mbedtls=/yourpath/"
574 "fpv=/another/path .Used in conjuction with -n"))
575 parser.add_argument("-g", "--git-info",
576 dest="git_info",
577 action="store",
578 help="Extract git information from given path. "
579 "Requires --colect directive. Optional parameter"
580 "--output-file ")
581 parser.add_argument("-x", "--cpp-check-xml",
582 dest="cppcheck_files",
583 nargs='*',
584 action="store",
585 help="Extract cppcheck static analysis information "
586 " output files, provided as a space separated "
587 "list. Requires --colect directive."
588 " Optional parameter --output-file ")
589 parser.add_argument("-z", "--checkpatch-parse-f",
590 dest="checkpatch_file",
591 action="store",
592 help="Extract checkpatch static analysis information "
593 " output file. Requires --colect directive."
594 " Optional parameter --output-file ")
595 parser.add_argument("-j", "--jenkins-info",
596 dest="jenkins_info",
597 action="store_true",
598 help="Extract jenkings and gerrit trigger enviroment "
599 "information fr. Requires --colect directive."
600 " Optional parameter --output-file ")
601 parser.add_argument("-l", "--collate-report",
602 dest="collate_report",
603 action="store",
604 nargs='*',
605 help="Pass a space separated list of key-value pairs"
606 "following format: -l report_key_0=report_file_0"
607 " report_key_1=report_file_1. Collate will "
608 "generate a joint dataset and print it to stdout."
609 "Optional parameter --output-file ")
610 parser.add_argument("-t", "--filter-report",
611 dest="filter_report",
612 action="store",
613 nargs='*',
614 help="Requires --report parameter for input file."
615 "Pass a space separated list of key-value pairs"
616 "following format: -l report_key_0=value_0"
617 " report_key_1=value_0. Filter will remote all"
618 "entries of the original report but the ones"
619 "mathing the key:value pairs defined and print it"
620 "to stdout.Optional parameter --output-file")
621 return parser.parse_args()
622
623
624if __name__ == "__main__":
625 main(get_cmd_args())