blob: 2096b8baa7543e8967ddeac6f3bb23107de59ce8 [file] [log] [blame]
Minos Galanakisf4ca6ac2017-12-11 02:39:21 +01001#!/usr/bin/env python3
2
3""" utils.py:
4
5 various simple and commonly used methods and classes shared by the scripts
6 in the CI environment """
7
8from __future__ import print_function
9
10__copyright__ = """
11/*
12 * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
13 *
14 * SPDX-License-Identifier: BSD-3-Clause
15 *
16 */
17 """
18__author__ = "Minos Galanakis"
19__email__ = "minos.galanakis@linaro.org"
20__project__ = "Trusted Firmware-M Open CI"
21__status__ = "stable"
Minos Galanakisea421232019-06-20 17:11:28 +010022__version__ = "1.1"
Minos Galanakisf4ca6ac2017-12-11 02:39:21 +010023
24import os
Minos Galanakisea421232019-06-20 17:11:28 +010025import re
Minos Galanakisf4ca6ac2017-12-11 02:39:21 +010026import sys
27import yaml
Minos Galanakisea421232019-06-20 17:11:28 +010028import requests
Minos Galanakisf4ca6ac2017-12-11 02:39:21 +010029import argparse
30import json
31import itertools
Minos Galanakisea421232019-06-20 17:11:28 +010032import xmltodict
33from shutil import move
Minos Galanakisf4ca6ac2017-12-11 02:39:21 +010034from collections import OrderedDict, namedtuple
Minos Galanakisea421232019-06-20 17:11:28 +010035from subprocess import Popen, PIPE, STDOUT, check_output
Minos Galanakisf4ca6ac2017-12-11 02:39:21 +010036
37
def detect_python3():
    """ Return True when the running interpreter is Python 3 or newer """

    return sys.version_info.major >= 3
42
43
def find_missing_files(file_list):
    """ Return the entries of file_list that do not exist as files """

    return [entry for entry in set(file_list) if not os.path.isfile(entry)]
50
51
def resolve_rel_path(target_path, origin_path=None):
    """ Resolve relative path from origin to target.

    :param target_path: Path to express relative to the origin.
    :param origin_path: Starting point. Defaults to the current working
                        directory *at call time*. (The previous default of
                        ``os.getcwd()`` in the signature was evaluated once
                        at import time, so it silently went stale after any
                        os.chdir().)
    :return: Relative path from the common prefix of the two paths to
             target_path.
    """

    if origin_path is None:
        origin_path = os.getcwd()
    # NOTE: commonprefix is character-based, matching the original behavior.
    common = os.path.commonprefix([origin_path, target_path])
    return os.path.relpath(target_path, common)
58
59
def print_test_dict(data_dict,
                    pad_space=80,
                    identation=5,
                    titl="Summary",
                    pad_char="*"):

    """ Configurable print formatter aimed for dictionaries of the type
    {"TEST NAME": "RESULT"} used in CI systems. It will also return
    the string which is printed.

    :param data_dict: Dictionary of test-name -> result strings.
    :param pad_space: Total alignment width of each line.
    :param identation: Left indentation applied to each entry.
    :param titl: Title rendered in the header line.
    :param pad_char: Character used for header/footer padding.
    :return: The composed report string (also printed to stdout).
    """

    # Calculate pad space between variables x, y to achieve alignment on y
    # taking into consideration a maximum alignment boundary p and
    # possible indentation i
    def flex_pad(x, y, p, i):
        return " " * (p - i * 2 - len(x) - len(y)) + "-> "

    # Calculate the padding for the dataset
    tests = [k + flex_pad(k,
                          v,
                          pad_space,
                          identation) + v for k, v in data_dict.items()]

    # Add the indentation
    tests = map(lambda x: " " * identation + x, tests)

    # Convert to string
    tests = "\n".join(tests)

    # Calculate the top header padding, flooring any rounding errors.
    # Integer division replaces the old float division followed by a
    # detect_python3()-guarded int() cast, which was a Python 2 leftover.
    hdr_pad = (pad_space - len(titl) - 3) // 2

    # Generate a print formatting dictionary
    print_dict = {"pad0": pad_char * (hdr_pad),
                  "pad1": pad_char * (hdr_pad + 1 if len(titl) % 2
                                      else hdr_pad),
                  "sumry": tests,
                  "pad2": pad_char * pad_space,
                  "titl": titl}

    # Compose & print the report
    r = "\n%(pad0)s %(titl)s %(pad1)s\n\n%(sumry)s\n\n%(pad2)s\n" % print_dict
    print(r)
    return r
106
107
def print_test(t_name=None, t_list=None, status="failed", tname="Tests"):
    """ Print a list of tests in a structured ascii table format """

    gfx_line1 = "=" * 80
    gfx_line2 = "\t" + "-" * 70

    # Optional banner with the test-suite name
    if t_name:
        print("%(line)s\n%(name)s\n%(line)s" % {"line": gfx_line1,
                                                "name": t_name})
    print("%s %s:" % (tname, status))

    # One boxed row per entry, each followed by a separator line
    rows = []
    for entry in t_list:
        rows.append("\t| %(key)s%(pad)s|\n%(line)s" % {
            "key": entry,
            "pad": (66 - len(entry)) * " ",
            "line": gfx_line2})
    print(gfx_line2 + "\n" + "\n".join(rows))
122
123
124def test(test_list,
125 test_dict,
126 test_name="TF-M Test",
127 pass_text=["PASSED", "PRESENT"],
128 error_on_failed=True,
129 summary=True):
130
131 """ Using input of a test_lst and a test results dictionary in the format
132 of test_name: resut key-value pairs, test() method will verify that Every
133 single method in the test_list has been tested and passed. Pass and Failed,
134 status tests can be overriden and error_on_failed flag, exits the script
135 with failure if a single test fails or is not detected. Returns a json
136 containing status and fields for each test passed/failed/missing, if error
137 on failed is not set.
138 """
139
140 t_report = {"name": test_name,
141 "success": None,
142 "passed": [],
143 "failed": [],
144 "missing": []}
145 # Clean-up tests that are not requested by test_list
146 test_dict = {k: v for k, v in test_dict.items() if k in test_list}
147
148 # Calculate the difference of the two sets to find missing tests
149 t_report["missing"] = list(set(test_list) - set(test_dict.keys()))
150
151 # Sor the items into the apropriate lists (failed or passed)
152 # based on their status.
153 for k, v in test_dict.items():
154 # print(k, v)
155 key = "passed" if v in pass_text else "failed"
156 t_report[key] += [k]
157
158 # For the test to pass every singe test in test_list needs to be present
159 # and be in the passed list
160 if len(test_list) == len(t_report["passed"]):
161 t_report["success"] = True
162 else:
163 t_report["success"] = False
164
165 # Print a summary
166 if summary:
167 if t_report["passed"]:
168 print_test(test_name, t_report["passed"], status="passed")
169 if t_report["missing"]:
170 print_test(test_name, t_report["missing"], status="missing")
171 if t_report["failed"]:
172 print_test(test_name, t_report["failed"], status="Failed")
173
174 print("\nTest %s has %s!" % (t_report["name"],
175 " been successful" if t_report["success"]
176 else "failed"))
177 print("-" * 80)
178 if error_on_failed:
179 syscode = 0 if t_report["success"] else 1
180 sys.exit(syscode)
181 return t_report
182
183
def save_json(f_name, data_object):
    """ Serialise data_object and write it to f_name as json """

    with open(f_name, "w") as out_f:
        json.dump(data_object, out_f, indent=2)
189
190
def save_dict_json(f_name, data_dict, sort_list=None):
    """ Save a dictionary object to file with optional sorting.

    :param f_name: Output json file path.
    :param data_dict: Dictionary to serialise.
    :param sort_list: Optional key order passed to sort_dict().
    """

    # Fall back to the unsorted dictionary when no sort order is given.
    # (Previously data_object was only bound inside the if-branch, so the
    # default sort_list=None raised NameError at the save_json call.)
    if sort_list:
        data_object = sort_dict(data_dict, sort_list)
    else:
        data_object = data_dict
    save_json(f_name, data_object)
197
198
def sort_dict(config_dict, sort_order_list=None):
    """ Create a fixed order dictionary out of a config dataset """

    # Explicit order wins; otherwise sort keys alphabetically
    key_order = sort_order_list if sort_order_list else sorted(config_dict)
    return OrderedDict((k, config_dict[k]) for k in key_order)
207
208
def load_json(f_name):
    """ Load object from json file """

    with open(f_name, "r") as in_f:
        try:
            return json.load(in_f)
        except ValueError as exc:
            print("No JSON object could be decoded from file: %s" % f_name)
        except IOError:
            print("Error opening file: %s" % f_name)
    # Reached only when one of the handlers above swallowed the error
    raise Exception("Failed to load file")
220
221
def load_yaml(f_name):
    """ Load object from a yaml file.

    :param f_name: Path of the yaml file to parse.
    :return: The parsed python object.
    :raises Exception: When the file cannot be parsed or read.
    """

    with open(f_name, "r") as in_f:
        try:
            # safe_load avoids arbitrary-object construction from untrusted
            # input and keeps working with PyYAML >= 6.0, where yaml.load()
            # without an explicit Loader argument is a TypeError.
            return yaml.safe_load(in_f)
        except yaml.YAMLError as exc:
            print("Error parsing file: %s" % f_name)
        except IOError:
            print("Error opening file: %s" % f_name)
    raise Exception("Failed to load file")
233
234
def subprocess_log(cmd, log_f, prefix=None, append=False, silent=False):
    """ Run a command as a subprocess and log its output to stdout and to
    a file. If prefix is specified it will be added as the first line in
    the file.

    :param cmd: Shell command string to execute.
    :param log_f: Path of the log file to write.
    :param prefix: Optional first line written to the log.
    :param append: Append to the log file instead of truncating it.
    :param silent: When set, do not echo output to stdout.
    :return: The subprocess's exit code.
    """

    with open(log_f, 'a' if append else "w") as log_file:
        if prefix:
            log_file.write(prefix + "\n")
        pcss = Popen(cmd,
                     stdout=PIPE,
                     stderr=STDOUT,
                     shell=True,
                     env=os.environ)
        for line in pcss.stdout:
            # Popen pipes yield bytes; decode before printing/writing text.
            # (The old detect_python3() guard was a Python 2 leftover.)
            line = line.decode("utf-8")
            if not silent:
                sys.stdout.write(line)
            log_file.write(line)
        pcss.communicate()
        # The unreachable bare `return` that followed this statement in the
        # original has been removed.
        return pcss.returncode
256
257
def run_proccess(cmd):
    """ Run a command as a subprocess, discarding its output, and return
    the exit code """

    proc = Popen(cmd,
                 shell=True,
                 stdout=PIPE,
                 stderr=PIPE,
                 env=os.environ)
    proc.communicate()
    return proc.returncode
269
270
def get_pid_status(pid):
    """ Read procfs on Linux machines to determine a process's status.
    Returns the status string if the process exists or None if it does
    not """

    # Matches e.g. "State:\tR (running)" and captures the long form
    state_rex = re.compile(r'(?:State:\t[A-Z]{1} \()(\w+)', re.MULTILINE)
    try:
        with open("/proc/%s/status" % pid, "r") as status_f:
            return state_rex.findall(status_f.read())[0]
    except Exception as e:
        print("Exception", e)
282
283
def check_pid_status(pid, status_list):
    """ Check a process's status against a provided list and return True
    if the process exists and has a status included in the list. (Linux) """

    current_status = get_pid_status(pid)

    # get_pid_status returns None when the process does not exist
    if not current_status:
        print("PID %s does not exist." % pid)
        return False

    if current_status in status_list:
        return True

    # TODO Remove debug print
    print("PID status %s not in %s" % (current_status,
                                       ",".join(status_list)))
    return False
299
300
def list_chunks(l, n):
    """ Yield successive n-sized chunks from l. """

    start = 0
    while start < len(l):
        yield l[start:start + n]
        start += n
306
307
def export_config_map(config_m, dir=None):
    """ Will export a dictionary of configurations to a group of JSON files """

    out_dir = dir if dir else os.getcwd()
    for cfg_name, cfg_data in config_m.items():
        # One <name>.json file per configuration, names lower-cased
        f_path = os.path.join(out_dir, cfg_name.lower() + ".json")
        print("Exporting config %s" % f_path)
        save_json(f_path, cfg_data)
317
318
def gen_cfg_combinations(name, categories, *args):
    """ Create a list of named tuples of `name`, with elements defined in a
    space separated string `categories` and an equal amount of lists for
    said categories provided as arguments. Order of arguments should match
    the order of the categories lists """

    combo_cls = namedtuple(name, categories)
    # Cartesian product over the per-category value lists
    return [combo_cls(*combo) for combo in itertools.product(*args)]
327
328
def show_progress(current_count, total_count):
    """ Display the percent progress percentage of input metric a over b """

    progress = int((current_count / total_count) * 100)
    # Scale the percentage onto a 70-character wide bar
    done = int(progress * 0.7)
    print("[ %s%s | %d%% ]" % ("#" * done, "~" * (70 - done), progress))
338
339
def get_cmd_args(descr="", parser=None):
    """ Parse command line arguments """

    # Build a default parser when the caller did not supply one
    if not parser:
        parser = argparse.ArgumentParser(description=descr)

    return parser.parse_args()
Minos Galanakisea421232019-06-20 17:11:28 +0100347
348
def arm_non_eabi_size(filename):
    """ Run arm-none-eabi-size command and parse the output using regex.
    Will return a tuple with the formatted data as well as the raw output
    of the command """

    size_info_rex = re.compile(r'^\s+(?P<text>[0-9]+)\s+(?P<data>[0-9]+)\s+'
                               r'(?P<bss>[0-9]+)\s+(?P<dec>[0-9]+)\s+'
                               r'(?P<hex>[0-9a-f]+)\s+(?P<file>\S+)',
                               re.MULTILINE)

    raw_output = check_output(["arm-none-eabi-size",
                               filename],
                              timeout=2).decode('UTF-8').rstrip()

    match = size_info_rex.search(raw_output)

    # Collect the numeric section-size fields into a dictionary
    fields = {key: match.group(key)
              for key in ("text", "data", "bss", "dec", "hex")}
    return [fields, raw_output]
370
371
def list_subdirs(directory):
    """ Return the absolute paths of the sub-directories of `directory` """

    directory = os.path.abspath(directory)
    entries = (os.path.join(directory, name)
               for name in os.listdir(directory))
    # Resolve symlinks before the directory check, as the original did
    return [path for path in entries
            if os.path.isdir(os.path.realpath(path))]
377
378
def get_local_git_info(directory, json_out_f=None):
    """ Extract git related information from a target directory. It allows
    optional export to json file.

    Shells out to git, so `directory` must be a checked-out git work tree
    and the `git` binary must be on PATH. Temporarily chdirs into
    `directory` and restores the previous working directory before
    returning. Returns a dictionary with author/email/dir/remote/date/
    commit/subject/message/change_id/sign_off/branch keys, or None (early
    return) when any git command writes to stderr. """

    directory = os.path.abspath(directory)
    cur_dir = os.path.abspath(os.getcwd())
    os.chdir(directory)

    # System commands to collect information:
    # cmd1: hash/author/email/date/subject as one tab-separated line
    # cmd2: commit message body only
    # cmd3: URL of the first configured remote
    # cmd4: remote branch whose head matches HEAD
    cmd1 = "git log HEAD -n 1 --pretty=format:'%H%x09%an%x09%ae%x09%ai%x09%s'"
    cmd2 = "git log HEAD -n 1 --pretty=format:'%b'"
    cmd3 = "git remote -v | head -n 1 | awk '{ print $2}';"
    cmd4 = ("git ls-remote --heads origin | "
            "grep $(git rev-parse HEAD) | cut -d / -f 3")

    # Splits a commit message into free-text body, Gerrit Change-Id and
    # Signed-off-by trailer (expects Change-Id to precede the sign-off)
    git_info_rex = re.compile(r'(?P<body>^[\s\S]*?)((?:Change-Id:\s)'
                              r'(?P<change_id>.*)\n)((?:Signed-off-by:\s)'
                              r'(?P<sign_off>.*)\n?)', re.MULTILINE)

    proc_res = []
    for cmd in [cmd1, cmd2, cmd3, cmd4]:
        r, e = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
        if e:
            # NOTE(review): any stderr output (even a warning) aborts here
            # and makes the function return None implicitly, without
            # restoring the working directory.
            print("Error", e)
            return
        else:
            try:
                txt_body = r.decode('ascii')
            except UnicodeDecodeError as E:
                # Fall back to utf-8 for non-ascii author names/messages
                txt_body = r.decode('utf-8')
            proc_res.append(txt_body.rstrip())

    # Unpack and tag the data (field order fixed by cmd1's format string).
    # NOTE(review): `hash` shadows the builtin of the same name.
    hash, name, email, date, subject = proc_res[0].split('\t')

    _raw_body = proc_res[1]
    _bd_items = re.findall(r'(Signed-off-by|Change-Id)', _raw_body,
                           re.MULTILINE)

    signed_off = None
    body = None
    change_id = None
    # If both sign-off and gerrit-id exist
    if len(_bd_items) == 2:
        m = git_info_rex.search(_raw_body)
        print(git_info_rex.findall(_raw_body))
        if m is not None:
            match_dict = m.groupdict()
            if "body" in match_dict.keys():
                body = match_dict["body"]
            if "sign_off" in match_dict.keys():
                signed_off = match_dict["sign_off"]
            if "change_id" in match_dict.keys():
                change_id = match_dict["change_id"]
        else:
            print("Error: Could not regex parse message", repr(_raw_body))
            body = _raw_body
    # If only one of sign-off / gerrit-id exist
    elif len(_bd_items) == 1:
        _entry_key = _bd_items[0]
        body, _extra = _raw_body.split(_entry_key)
        if _entry_key == "Change-Id":
            change_id = _extra
        else:
            signed_off = _extra
    # If the message contains commit message body only
    else:
        body = _raw_body

    # Attempt to read the branch from Gerrit Trigger
    try:
        branch = os.environ["GERRIT_BRANCH"]
    # If not set, compare the commit hash with the remote branches to
    # determine the branch of origin. Warning: this assumes that only one
    # branch has its head on this commit.
    except KeyError as E:
        branch = proc_res[3]

    remote = proc_res[2]
    # Internal Gerrit specific code
    # Intended for converting the git remote to a more usable gitweb url
    known_remotes = ["https://gerrit.oss.arm.com",
                     "http://gerrit.mirror.oss.arm.com"]

    for kr in known_remotes:
        if kr in remote:
            print("Applying Remote specific patch to remote", kr)

            # Keep only the repo path ("<kr>/<repo>" -> "<repo>")
            remote = remote.split(kr)[-1][1:]
            print("REMOTE", remote)
            remote = "%s/gitweb?p=%s.git;a=commit;h=%s" % (kr, remote, hash)
            break

    out = {"author": name.strip(),
           "email": email.strip(),
           "dir": directory.strip(),
           "remote": remote.strip(),
           "date": date.strip(),
           "commit": hash.strip(),
           "subject": subject.strip(),
           "message": body.strip(),
           "change_id": change_id.strip() if change_id is not None else "N.A",
           "sign_off": signed_off.strip() if signed_off is not None else "N.A",
           "branch": branch.strip()}

    # Restore the directory path
    os.chdir(cur_dir)
    if json_out_f:
        save_json(json_out_f, out)
    return out
489
490
def get_remote_git_info(url):
    """ Collect git information from a Linux Kernel web repository """

    auth_rex = re.compile(r'(?:<th>author</th>.*)(?:span>)(.*)'
                          r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
    subject_rex = re.compile(r'(?:\'commit-subject\'>)(.*)(?:</div>)')
    body_rex = re.compile(r'(?:\'commit-msg\'>)([\s\S^<]*)(?:</div>'
                          r'<div class=\'diffstat-header\'>)', re.MULTILINE)

    content = requests.get(url).text

    # Scrape the commit metadata out of the rendered html
    author, date = auth_rex.search(content).groups()
    subject = subject_rex.search(content).groups()[0]
    body = body_rex.search(content).groups()[0]
    remote, hash = url.split("=")

    raw_info = {"author": author,
                "remote": remote[:-3],
                "date": date,
                "commit": hash,
                "subject": subject,
                "message": body}
    # Clean up html entity remnants from every field
    return {k: re.sub(r'&[a-z]t;?', "", v) for k, v in raw_info.items()}
516
517
def convert_git_ref_path(dir_path):
    """ If a git long hash is detected in a path move it to a short hash.

    :param dir_path: Directory path that may embed a 40-character git hash.
    :return: The (possibly renamed) directory path.
    """

    # Detect a git hash on a directory naming format of name_{hash},
    # {hash}, name-{hash}
    git_hash_rex = re.compile(r'(?:[_|-])*([a-f0-9]{40})')

    # if checkout directory name contains a git reference convert to short
    git_hash = git_hash_rex.findall(dir_path)
    if len(git_hash):
        d = dir_path.replace(git_hash[0], git_hash[0][:7])
        # Bug fix: the paths were previously passed as extra print()
        # arguments instead of being %-interpolated into the message,
        # printing a literal "Renaming %s -> %s".
        print("Renaming %s -> %s" % (dir_path, d))
        move(dir_path, d)
        dir_path = d
    return dir_path
533
534
def xml_read(file):
    """ Read the contents of an xml file and convert it to python object """

    try:
        with open(file, "r") as xml_f:
            return xmltodict.parse(xml_f.read())
    except Exception as E:
        # Best effort: report the problem and fall through to None
        print("Error", E)
        return None
545
546
def list_filtered_tree(directory, rex_filter=None):
    """ Walk `directory` recursively and return the full paths of all files
    found, optionally keeping only those matching the rex_filter regex """

    all_files = [os.path.join(path, fname)
                 for path, _subdirs, files in os.walk(directory)
                 for fname in files]
    if not rex_filter:
        return all_files
    rex = re.compile(rex_filter)
    return [f_path for f_path in all_files if rex.search(f_path)]
557
558
def gerrit_patch_from_changeid(remote, change_id):
    """ Use Gerrit's REST api for a best effort to retrieve the url of the
    patch-set under review """

    try:
        resp = requests.get('%s/changes/%s' % (remote, change_id),
                            headers={'Accept': 'application/json'})
        # Gerrit prefixes JSON responses with a magic line; skip ahead to
        # the first brace before decoding.
        payload = resp.text[resp.text.find("{"):].rstrip()
        change_no = json.loads(payload)["_number"]
        return "%s/#/c/%s" % (remote, change_no)
    except Exception as E:
        print("Failed to retrieve change (%s) from URL %s" % (change_id,
                                                              remote))
        print("Exception Thrown:", E)
        raise Exception()
573 raise Exception()