#!/usr/bin/env python3
#
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Audit validity dates of X.509 crt/crl/csr.

This script audits the validity dates of the crt/crl/csr data used for
testing. It prints information about any X.509 data whose validity period
does not cover the provided validity period. The data are collected from
tests/data_files/ and tests/suites/*.data files by default.
"""
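
# Example invocation (the script name below is illustrative; adjust the dates
# to the period you want to audit):
#   python3 audit-validity-dates.py --not-before 2023-01-01 --not-after 2024-01-01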

import os
import sys
import re
import typing
import argparse
import datetime
import glob
from enum import Enum

# The script requires cryptography >= 35.0.0, which is only available
# for Python >= 3.6. Disable the pylint error here until we are using
# a modern enough system on our CI.
from cryptography import x509 #pylint: disable=import-error

# Reuse the function to parse *.data files in tests/suites/.
from generate_test_code import parse_test_data as parse_suite_data

class DataType(Enum):
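    """Type of the X.509 data: certificate, CRL or CSR."""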
    CRT = 1 # Certificate
    CRL = 2 # Certificate Revocation List
    CSR = 3 # Certificate Signing Request

class DataFormat(Enum):
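    """Encoding format of the X.509 data."""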
    PEM = 1 # Privacy-Enhanced Mail
    DER = 2 # Distinguished Encoding Rules

class AuditData:
    """Store data location, type and validity period of X.509 objects."""
    #pylint: disable=too-few-public-methods
    def __init__(self, data_type: DataType, x509_obj):
        self.data_type = data_type
        self.location = ""
        self.fill_validity_duration(x509_obj)

    def fill_validity_duration(self, x509_obj):
        """Read validity period from an X.509 object."""
        # Certificate expires after "not_valid_after"
        # Certificate is invalid before "not_valid_before"
        if self.data_type == DataType.CRT:
            self.not_valid_after = x509_obj.not_valid_after
            self.not_valid_before = x509_obj.not_valid_before
        # CertificateRevocationList expires after "next_update"
        # CertificateRevocationList is invalid before "last_update"
        elif self.data_type == DataType.CRL:
            self.not_valid_after = x509_obj.next_update
            self.not_valid_before = x509_obj.last_update
        # CertificateSigningRequest is always valid.
        elif self.data_type == DataType.CSR:
            self.not_valid_after = datetime.datetime.max
            self.not_valid_before = datetime.datetime.min
        else:
            raise ValueError("Unsupported data_type: {}".format(self.data_type))

class X509Parser:
    """A parser class to parse crt/crl/csr file or data in PEM/DER format."""
    PEM_REGEX = br'-{5}BEGIN (?P<type>.*?)-{5}\n(?P<data>.*?)-{5}END (?P=type)-{5}\n'
    PEM_TAG_REGEX = br'-{5}BEGIN (?P<type>.*?)-{5}\n'
    PEM_TAGS = {
        DataType.CRT: 'CERTIFICATE',
        DataType.CRL: 'X509 CRL',
        DataType.CSR: 'CERTIFICATE REQUEST'
    }
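    # For example, data whose first line is "-----BEGIN X509 CRL-----" is
    # handled as DataType.CRL.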

    def __init__(self,
                 backends:
                 typing.Dict[DataType,
                             typing.Dict[DataFormat,
                                         typing.Callable[[bytes], object]]]) \
        -> None:
        self.backends = backends
        self.__generate_parsers()

    def __generate_parser(self, data_type: DataType):
        """Parser generator for a specific DataType"""
        tag = self.PEM_TAGS[data_type]
        pem_loader = self.backends[data_type][DataFormat.PEM]
        der_loader = self.backends[data_type][DataFormat.DER]
        def wrapper(data: bytes):
            pem_type = X509Parser.pem_data_type(data)
            # It is in PEM format with target tag
            if pem_type == tag:
                return pem_loader(data)
            # It is in PEM format without target tag
            if pem_type:
                return None
            # It might be in DER format
            try:
                result = der_loader(data)
            except ValueError:
                result = None
            return result
        wrapper.__name__ = "{}.parser[{}]".format(type(self).__name__, tag)
        return wrapper

    def __generate_parsers(self):
        """Generate parsers for all supported DataTypes."""
        self.parsers = {}
        for data_type, _ in self.PEM_TAGS.items():
            self.parsers[data_type] = self.__generate_parser(data_type)

    def __getitem__(self, item):
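        """Return the parser callable registered for the given DataType."""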
        return self.parsers[item]

    @staticmethod
    def pem_data_type(data: bytes) -> typing.Optional[str]:
        """Get the tag from the data in PEM format.

        :param data: data to be checked in binary mode.
        :return: PEM tag, or None when no tag is detected.
        """
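        # For example (illustrative input, not taken from the test data):
        # data starting with the line "-----BEGIN CERTIFICATE-----" yields
        # 'CERTIFICATE', while plain DER bytes with no PEM header yield None.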
        m = re.search(X509Parser.PEM_TAG_REGEX, data)
        if m is not None:
            return m.group('type').decode('UTF-8')
        else:
            return None

    @staticmethod
    def check_hex_string(hex_str: str) -> bool:
        """Check if the hex string is possibly DER data."""
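        # A worked example (illustrative value, not taken from the test data):
        # '3003020105' is accepted, because it parses as 30 (SEQUENCE),
        # 03 (length: 3 content bytes) and 02 01 05 (the content), while
        # 'abcdef' is rejected because the first byte is not 0x30 (SEQUENCE).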
        hex_len = len(hex_str)
        # At least 6 hex chars for 3 bytes: Type + Length + Content
        if hex_len < 6:
            return False
        # Check if Type (1 byte) is SEQUENCE.
        if hex_str[0:2] != '30':
            return False
        # Check LENGTH (1 byte) value
        content_len = int(hex_str[2:4], base=16)
        consumed = 4
        if content_len in (128, 255):
            # Indefinite or Reserved
            return False
        elif content_len > 127:
            # Definite, Long
            length_len = (content_len - 128) * 2
            content_len = int(hex_str[consumed:consumed+length_len], base=16)
            consumed += length_len
        # Check that the total length is consistent with LENGTH.
        if hex_len != content_len * 2 + consumed:
            return False
        return True

class Auditor:
    """A base class for auditing X.509 data."""
    def __init__(self, verbose):
        self.verbose = verbose
        self.default_files = []
        # A list to store the parsed audit_data.
        self.audit_data = []
        self.parser = X509Parser({
            DataType.CRT: {
                DataFormat.PEM: x509.load_pem_x509_certificate,
                DataFormat.DER: x509.load_der_x509_certificate
            },
            DataType.CRL: {
                DataFormat.PEM: x509.load_pem_x509_crl,
                DataFormat.DER: x509.load_der_x509_crl
            },
            DataType.CSR: {
                DataFormat.PEM: x509.load_pem_x509_csr,
                DataFormat.DER: x509.load_der_x509_csr
            },
        })

    def error(self, *args):
        #pylint: disable=no-self-use
        print("Error: ", *args, file=sys.stderr)

    def warn(self, *args):
        if self.verbose:
            print("Warn: ", *args, file=sys.stderr)

    def parse_file(self, filename: str) -> typing.List[AuditData]:
        """
        Parse a list of AuditData from file.

        :param filename: name of the file to parse.
        :return: list of AuditData parsed from the file.
        """
        with open(filename, 'rb') as f:
            data = f.read()
        result = self.parse_bytes(data)
        if result is not None:
            result.location = filename
            return [result]
        else:
            return []

    def parse_bytes(self, data: bytes):
        """Parse AuditData from bytes."""
        for data_type in list(DataType):
            try:
                result = self.parser[data_type](data)
            except ValueError as val_error:
                result = None
                self.warn(val_error)
            if result is not None:
                audit_data = AuditData(data_type, result)
                return audit_data
        return None

    def walk_all(self, file_list: typing.Optional[typing.List[str]] = None):
        """
        Iterate over all the files in the list and get audit data.
        """
        if file_list is None:
            file_list = self.default_files
        for filename in file_list:
            data_list = self.parse_file(filename)
            self.audit_data.extend(data_list)

    @staticmethod
    def find_test_dir():
        """Get the relative path for the Mbed TLS test directory."""
        if os.path.isdir('tests'):
            tests_dir = 'tests'
        elif os.path.isdir('suites'):
            tests_dir = '.'
        elif os.path.isdir('../suites'):
            tests_dir = '..'
        else:
            raise Exception("Mbed TLS source tree not found")
        return tests_dir

class TestDataAuditor(Auditor):
    """Class for auditing files in tests/data_files/"""
    def __init__(self, verbose):
        super().__init__(verbose)
        self.default_files = self.collect_default_files()

    def collect_default_files(self):
        """Collect all files in tests/data_files/"""
        test_dir = self.find_test_dir()
        test_data_glob = os.path.join(test_dir, 'data_files/**')
        data_files = [f for f in glob.glob(test_data_glob, recursive=True)
                      if os.path.isfile(f)]
        return data_files

class FileWrapper():
    """
    This is a stub class of generate_test_code.FileWrapper.

    This class reads the whole file to memory before iterating
    over the lines.
    """

    def __init__(self, file_name):
        """
        Read the file and initialize the line number to 0.

        :param file_name: File path to open.
        """
        with open(file_name, 'rb') as f:
            self.buf = f.read()
        self.buf_len = len(self.buf)
        self._line_no = 0
        self._line_start = 0

    def __iter__(self):
        """Make the class iterable."""
        return self

    def __next__(self):
        """
        Return the next line of the file per iteration.

        :return: Line read from file.
        """
        # If we reach the end of the file.
        if not self._line_start < self.buf_len:
            raise StopIteration

        # Find the first LF as the end of the current line.
        line_end = self.buf.find(b'\n', self._line_start) + 1
        if line_end > 0:
            line = self.buf[self._line_start:line_end]
            self._line_start = line_end
            self._line_no += 1
        else:
            # No LF found. We are at the last line without LF.
            line = self.buf[self._line_start:]
            self._line_start = self.buf_len
            self._line_no += 1

        # Convert the byte array to a string with the default encoding,
        # strip any trailing whitespace and terminate with a single LF.
        return line.decode(sys.getdefaultencoding()).rstrip() + '\n'

    def get_line_no(self):
        """
        Return the current line number.
        """
        return self._line_no

    line_no = property(get_line_no)

class SuiteDataAuditor(Auditor):
    """Class for auditing files in tests/suites/*.data"""
    def __init__(self, options):
        super().__init__(options)
        self.default_files = self.collect_default_files()

    def collect_default_files(self):
        """Collect all files in tests/suites/*.data"""
        test_dir = self.find_test_dir()
        suites_data_folder = os.path.join(test_dir, 'suites')
        data_files = glob.glob(os.path.join(suites_data_folder, '*.data'))
        return data_files

    def parse_file(self, filename: str):
        """
        Parse a list of AuditData from file.

        :param filename: name of the file to parse.
        :return: list of AuditData parsed from the file.
        """
        audit_data_list = []
        data_f = FileWrapper(filename)
        for _, _, _, test_args in parse_suite_data(data_f):
            for idx, test_arg in enumerate(test_args):
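                # Only a quoted hex string (for example "308204...") can hold
                # DER-encoded X.509 data; skip any other test argument.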
                match = re.match(r'"(?P<data>[0-9a-fA-F]+)"', test_arg)
                if not match:
                    continue
                if not X509Parser.check_hex_string(match.group('data')):
                    continue
                audit_data = self.parse_bytes(bytes.fromhex(match.group('data')))
                if audit_data is None:
                    continue
                audit_data.location = "{}:{}:#{}".format(filename,
                                                         data_f.line_no,
                                                         idx + 1)
                audit_data_list.append(audit_data)

        return audit_data_list

def list_all(audit_data: AuditData):
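    """Print the validity period, type and location of an AuditData entry."""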
    print("{}\t{}\t{}\t{}".format(
        audit_data.not_valid_before.isoformat(timespec='seconds'),
        audit_data.not_valid_after.isoformat(timespec='seconds'),
        audit_data.data_type.name,
        audit_data.location))

def main():
    """
    Parse the command line, audit the collected X.509 data and report
    the results.
    """
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument('-a', '--all',
                        action='store_true',
                        help='list the information of all the files')
    parser.add_argument('-v', '--verbose',
                        action='store_true', dest='verbose',
                        help='show warnings')
    parser.add_argument('--not-before', dest='not_before',
                        help=('not valid before this date (UTC, YYYY-MM-DD). '
                              'Default: today'),
                        metavar='DATE')
    parser.add_argument('--not-after', dest='not_after',
                        help=('not valid after this date (UTC, YYYY-MM-DD). '
                              'Default: not-before'),
                        metavar='DATE')
    parser.add_argument('files', nargs='*', help='files to audit',
                        metavar='FILE')

    args = parser.parse_args()

    # start main routine
    td_auditor = TestDataAuditor(args.verbose)
    sd_auditor = SuiteDataAuditor(args.verbose)

    if args.files:
        data_files = args.files
        suite_data_files = args.files
    else:
        data_files = td_auditor.default_files
        suite_data_files = sd_auditor.default_files

    if args.not_before:
        not_before_date = datetime.datetime.fromisoformat(args.not_before)
    else:
        not_before_date = datetime.datetime.today()
    if args.not_after:
        not_after_date = datetime.datetime.fromisoformat(args.not_after)
    else:
        not_after_date = not_before_date

    td_auditor.walk_all(data_files)
    sd_auditor.walk_all(suite_data_files)
    audit_results = td_auditor.audit_data + sd_auditor.audit_data


    # Filter out the data whose validity period fully covers the provided
    # period; only the data that fail to cover it are reported.
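    # For example, with the defaults (both dates set to today), a certificate
    # that has already expired, or one that is not yet valid today, is
    # reported.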
    filter_func = lambda d: (not_before_date < d.not_valid_before) or \
                            (d.not_valid_after < not_after_date)

    if args.all:
        filter_func = None

    for d in filter(filter_func, audit_results):
        list_all(d)

    print("\nDone!\n")

if __name__ == "__main__":
    main()