Jerome Forissier | 28481ff | 2019-08-09 10:46:02 +0200 | [diff] [blame] | 1 | #!/usr/bin/env python3 |
| 2 | # |
| 3 | # Copyright (c) 2019, Linaro Limited |
| 4 | # |
| 5 | # SPDX-License-Identifier: BSD-2-Clause |
| 6 | |
| 7 | from pathlib import PurePath |
| 8 | from urllib.request import urlopen |
| 9 | |
| 10 | import argparse |
| 11 | import glob |
| 12 | import os |
| 13 | import re |
| 14 | import tempfile |
| 15 | |
| 16 | |
# 'diff --git a/<path> b/<path>' header of each file in a patch; the
# 'path' group captures the file path on the 'a/' side
DIFF_GIT_RE = re.compile(r'^diff --git a/(?P<path>.*) ')
# 'Reviewed-by:' / 'Acked-by:' tag lines; the 'approver' group captures
# everything up to and including the final '>' (name plus e-mail)
REVIEWED_RE = re.compile(r'^Reviewed-by: (?P<approver>.*>)')
ACKED_RE = re.compile(r'^Acked-by: (?P<approver>.*>)')
# First line of each patch in 'git format-patch' output ('From <sha1>')
PATCH_START = re.compile(r'^From [0-9a-f]{40}')
Jerome Forissier | 28481ff | 2019-08-09 10:46:02 +0200 | [diff] [blame] | 21 | |
| 22 | |
def get_args():
    """Define the command line interface and return the parsed arguments."""
    description = ('Print the maintainers for the given source files or '
                   'directories; or for the files modified by a patch or '
                   'a pull request. (With -m) Check if a patch or pull '
                   'request is properly Acked/Reviewed for merging.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-m', '--merge-check', action='store_true',
                        help='use Reviewed-by: and Acked-by: tags found '
                             'in patches to prevent display of '
                             'information for all the approved paths.')
    parser.add_argument('-p', '--show-paths', action='store_true',
                        help='show all paths that are not approved.')
    parser.add_argument('-s', '--strict', action='store_true',
                        help='stricter conditions for patch approval '
                             'check: subsystem "THE REST" is ignored '
                             'for paths that match some other '
                             'subsystem.')
    parser.add_argument('arg', nargs='*', help='file or patch')
    parser.add_argument('-f', '--file', action='append',
                        help='treat following argument as a file '
                             'path, not a patch.')
    parser.add_argument('-g', '--github-pr', action='append', type=int,
                        help='Github pull request ID. The script '
                             'will download the patchset from Github '
                             'to a temporary file and process it.')
    return parser.parse_args()
| 50 | |
| 51 | |
| 52 | # Parse MAINTAINERS and return a dictionary of subsystems such as: |
| 53 | # {'Subsystem name': {'R': ['foo', 'bar'], 'S': ['Maintained'], |
| 54 | # 'F': [ 'path1', 'path2' ]}, ...} |
def parse_maintainers():
    """Parse the MAINTAINERS file found in the current directory.

    Returns a dict mapping each subsystem name to a dict of its
    single-letter tags, e.g.:
    {'Subsystem name': {'R': ['foo', 'bar'], 'S': ['Maintained'],
                        'F': ['path1', 'path2']}, ...}
    Exits with status 1 when not run from the top of the optee_os tree.
    """
    subsystems = {}
    cwd = os.getcwd()
    # This script lives in scripts/, so the tree root is one level up
    parent = os.path.dirname(os.path.realpath(__file__)) + "/../"
    if (os.path.realpath(cwd) != os.path.realpath(parent)):
        print("Error: this script must be run from the top-level of the "
              "optee_os tree")
        exit(1)
    with open("MAINTAINERS", "r") as f:
        start_found = False  # True once the subsystem section is reached
        ss = {}              # tags of the subsystem currently being parsed
        name = ''            # name of the subsystem currently being parsed
        for line in f:
            line = line.strip()
            if not line:
                continue
            if not start_found:
                # Skip the free-form header; the subsystem list starts
                # right after a '----------' separator line
                if line.startswith("----------"):
                    start_found = True
                continue

            if line[1] == ':':
                # Tag line such as 'F:\tpath': append the value to the
                # current subsystem under its single-letter key
                letter = line[0]
                if (not ss.get(letter)):
                    ss[letter] = []
                ss[letter].append(line[3:])
            else:
                # Start of a new subsystem: save the previous one first
                if name:
                    subsystems[name] = ss
                name = line
                ss = {}
        # Save the last subsystem in the file
        if name:
            subsystems[name] = ss

    return subsystems
| 90 | |
| 91 | |
# If @patchset is a patchset file and contains 2 patches or more, write
# individual patches to temporary files and return the paths.
# Otherwise return [].
def split_patchset(patchset):
    """Split the patchset file @patchset into individual patches.

    If @patchset contains two or more patches, write each patch to its
    own temporary file and return the list of paths (the caller removes
    them). Return [] otherwise (unreadable file, no patch, or a single
    patch).
    """
    psname = os.path.basename(patchset).replace('.', '_')
    patchnum = 0
    of = None
    ret = []
    try:
        f = open(patchset, "r")
    except OSError:
        return []
    with f:
        for line in f:
            if PATCH_START.search(line):
                # New patch found: flush the previous file (so that its
                # content is on disk before anyone reopens it by name)
                # and create a new one
                if of:
                    of.close()
                patchnum += 1
                prefix = "{}_{}_".format(patchnum, psname)
                of = tempfile.NamedTemporaryFile(mode="w", prefix=prefix,
                                                 suffix=".patch",
                                                 delete=False)
                ret.append(of.name)
            # Anything before the first 'From <sha1>' line is discarded
            if of:
                of.write(line)
    if of:
        of.close()
    if len(ret) >= 2:
        return ret
    if len(ret) == 1:
        # A single patch needs no splitting: drop the copy
        os.remove(ret[0])
    return []
| 122 | |
| 123 | |
Jerome Forissier | 28481ff | 2019-08-09 10:46:02 +0200 | [diff] [blame] | 124 | # If @path is a patch file, returns the paths touched by the patch as well |
| 125 | # as the content of the review/ack tags |
def get_paths_from_patch(patch):
    """Extract ([paths], [approvers]) from the patch file @patch.

    Paths are the files touched by the patch ('diff --git' headers);
    approvers are the values of the 'Reviewed-by:' and 'Acked-by:' tags.
    Both lists are duplicate-free and keep first-seen order. Returns
    ([], []) when @patch cannot be read as text or contains no diff, so
    the caller can fall back to treating @patch as a plain path.
    """
    paths = []
    approvers = []
    try:
        with open(patch, "r") as f:
            for line in f:
                match = re.search(DIFF_GIT_RE, line)
                if match:
                    p = match.group('path')
                    if p not in paths:
                        paths.append(p)
                    continue
                # Reviewed-by: and Acked-by: carry the same weight for
                # the merge check, handle them identically
                for regex in (REVIEWED_RE, ACKED_RE):
                    match = re.search(regex, line)
                    if match:
                        a = match.group('approver')
                        if a not in approvers:
                            approvers.append(a)
                        break
    except (OSError, UnicodeDecodeError):
        # Unreadable or binary file: best effort, return what we have
        pass
    return (paths, approvers)
| 153 | |
| 154 | |
| 155 | # Does @path match @pattern? |
| 156 | # @pattern has the syntax defined in the Linux MAINTAINERS file -- mostly a |
| 157 | # shell glob pattern, except that a trailing slash means a directory and |
| 158 | # everything below. Matching can easily be done by converting to a regexp. |
def match_pattern(path, pattern):
    """Return True if @path matches the MAINTAINERS F:/X: @pattern."""
    # Append a trailing slash if path is an existing directory, so that it
    # matches F: entries such as 'foo/bar/'
    if not path.endswith('/') and os.path.isdir(path):
        path += '/'
    # Escape regex metacharacters before translating the glob wildcards;
    # otherwise characters such as '.' stay special and e.g. pattern
    # 'mk/*.mk' would also match 'mk/foomk'. re.escape() (Python >= 3.7)
    # leaves '/' untouched, so the trailing-slash test below still works.
    rep = "^" + re.escape(pattern)
    # '*' matches any characters except '/', '?' any single such character
    rep = rep.replace(r'\*', '[^/]+')
    rep = rep.replace(r'\?', '[^/]')
    if rep.endswith('/'):
        # A trailing slash means the directory and everything below it
        rep += '.*'
    rep += '$'
    return re.match(rep, path) is not None
| 171 | |
| 172 | |
def get_subsystems_for_path(subsystems, path, strict):
    """Return the subset of @subsystems whose file patterns cover @path.

    A subsystem matches when at least one of its 'F:' patterns matches
    @path and none of its 'X:' (exclusion) patterns does. With @strict,
    the catch-all subsystem 'THE REST' is dropped whenever any other
    subsystem matches too.
    """
    found = {}
    for name, ss in subsystems.items():
        if any(match_pattern(path, pat) for pat in ss.get('X') or []):
            continue  # path explicitly excluded from this subsystem
        if any(match_pattern(path, pat) for pat in ss.get('F') or []):
            found[name] = ss
    if strict and len(found) > 1:
        found.pop('THE REST', None)
    return found
| 192 | |
| 193 | |
def get_ss_maintainers(subsys):
    """Return the 'M:' (maintainer) entries of @subsys, or []."""
    maintainers = subsys.get('M')
    return maintainers if maintainers else []


def get_ss_reviewers(subsys):
    """Return the 'R:' (reviewer) entries of @subsys, or []."""
    reviewers = subsys.get('R')
    return reviewers if reviewers else []


def get_ss_approvers(ss):
    """Return everybody entitled to approve a change to subsystem @ss."""
    return get_ss_maintainers(ss) + get_ss_reviewers(ss)
| 204 | |
| 205 | |
def approvers_have_approved(approved_by, approvers):
    """Return True when someone in @approvers occurs in @approved_by.

    Entries are compared only up to the closing '>' of the e-mail
    address, so anything after it (Github ID...) is ignored.
    """
    granted = {entry.split('>', 1)[0] for entry in approved_by}
    return any(cand.split('>', 1)[0] in granted for cand in approvers)
| 215 | |
| 216 | |
def download(pr):
    """Download the patchset of Github pull request @pr to a temporary
    file and return the file path (the caller removes the file)."""
    url = "https://github.com/OP-TEE/optee_os/pull/{}.patch".format(pr)
    f = tempfile.NamedTemporaryFile(mode="wb", prefix="pr{}_".format(pr),
                                    suffix=".patch", delete=False)
    print("Downloading {}...".format(url), end='', flush=True)
    # Close both the HTTP response and the temporary file so that the
    # data is fully flushed to disk before the caller reopens it by name
    with f, urlopen(url) as response:
        f.write(response.read())
    print(" Done.")
    return f.name
| 225 | |
| 226 | |
def main():
    """Entry point: resolve the command-line arguments to a list of
    paths, then print the maintainers/reviewers for those paths (with
    -m, only for the paths that are not properly approved)."""
    global args

    args = get_args()
    all_subsystems = parse_maintainers()
    paths = []          # paths to report maintainers for
    arglist = []        # arguments processed as-is (single patch or path)
    downloads = []      # temporary files downloaded from Github
    split_patches = []  # temporary per-patch files split from patchsets

    for pr in args.github_pr or []:
        downloads += [download(pr)]

    for arg in args.arg + downloads:
        if os.path.exists(arg):
            # Multi-patch files are split so that review/ack tags are
            # evaluated per individual patch
            patches = split_patchset(arg)
            if patches:
                split_patches += patches
                continue
        arglist.append(arg)

    for arg in arglist + split_patches:
        patch_paths = []
        approved_by = []
        if os.path.exists(arg):
            # Try to parse as a patch
            (patch_paths, approved_by) = get_paths_from_patch(arg)
        if not patch_paths:
            # Not a patch, consider the path itself
            # as_posix() cleans the path a little bit (suppress leading ./ and
            # duplicate slashes...)
            patch_paths = [PurePath(arg).as_posix()]
        for path in patch_paths:
            approved = False
            if args.merge_check:
                # A path is approved when any matching subsystem has at
                # least one maintainer/reviewer in the patch's tags
                ss_for_path = get_subsystems_for_path(all_subsystems, path,
                                                      args.strict)
                for key in ss_for_path:
                    ss_approvers = get_ss_approvers(ss_for_path[key])
                    if approvers_have_approved(approved_by, ss_approvers):
                        approved = True
            if not approved:
                paths += [path]

    # Delete the temporary files created above
    for f in downloads + split_patches:
        os.remove(f)

    if args.file:
        paths += args.file

    if (args.show_paths):
        print(paths)

    ss = {}
    for path in paths:
        ss.update(get_subsystems_for_path(all_subsystems, path, args.strict))
    for key in ss:
        # Truncate subsystem names longer than 50 characters for display
        ss_name = key[:50] + (key[50:] and '...')
        for name in ss[key].get('M') or []:
            print("{} (maintainer:{})".format(name, ss_name))
        for name in ss[key].get('R') or []:
            print("{} (reviewer:{})".format(name, ss_name))
| 289 | |
| 290 | |
# Run only when executed as a script, not when imported as a module
if __name__ == "__main__":
    main()