#!/usr/bin/env python3
#
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Audit the validity dates of X.509 crt/crl/csr files.

This script audits the validity dates of the crt/crl/csr files used for
testing. It prints information about the X.509 objects, excluding the objects
that are valid throughout the desired validity period. The data are collected
from tests/data_files/ and tests/suites/*.data files by default.
"""
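
# Example invocations (illustrative; the options are defined in main() below,
# and the script is assumed to be run from its own directory, tests/scripts/):
#   ./audit-validity-dates.py                     # use today as the period
#   ./audit-validity-dates.py --from 2023-01-01 --to 2033-01-01
#   ./audit-validity-dates.py -a --data-files <certificate-file>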

import os
import sys
import re
import typing
import argparse
import datetime
import glob
import logging
import hashlib
from enum import Enum

# The script requires cryptography >= 35.0.0 which is only available
# for Python >= 3.6.
import cryptography
from cryptography import x509

from generate_test_code import FileWrapper

import scripts_path # pylint: disable=unused-import
from mbedtls_dev import build_tree

def check_cryptography_version():
    match = re.match(r'^[0-9]+', cryptography.__version__)
    if match is None or int(match.group(0)) < 35:
        raise Exception("audit-validity-dates requires cryptography >= 35.0.0 "
                        + "({} is too old)".format(cryptography.__version__))

class DataType(Enum):
    CRT = 1 # Certificate
    CRL = 2 # Certificate Revocation List
    CSR = 3 # Certificate Signing Request


class DataFormat(Enum):
    PEM = 1 # Privacy-Enhanced Mail
    DER = 2 # Distinguished Encoding Rules


class AuditData:
    """Store data location, type and validity period of X.509 objects."""
    #pylint: disable=too-few-public-methods
    def __init__(self, data_type: DataType, x509_obj):
        self.data_type = data_type
        # The locations where the X.509 object was found.
        self.locations = [] # type: typing.List[str]
        self.fill_validity_duration(x509_obj)
        self._obj = x509_obj
        encoding = cryptography.hazmat.primitives.serialization.Encoding.DER
        self._identifier = hashlib.sha1(self._obj.public_bytes(encoding)).hexdigest()

    def __eq__(self, __value) -> bool:
        return self._obj == __value._obj

    @property
    def identifier(self):
        """
        Identifier of the underlying X.509 object, which is consistent across
        different runs.
        """
        return self._identifier

    def fill_validity_duration(self, x509_obj):
        """Read validity period from an X.509 object."""
        # Certificate expires after "not_valid_after"
        # Certificate is invalid before "not_valid_before"
        if self.data_type == DataType.CRT:
            self.not_valid_after = x509_obj.not_valid_after
            self.not_valid_before = x509_obj.not_valid_before
        # CertificateRevocationList expires after "next_update"
        # CertificateRevocationList is invalid before "last_update"
        elif self.data_type == DataType.CRL:
            self.not_valid_after = x509_obj.next_update
            self.not_valid_before = x509_obj.last_update
        # CertificateSigningRequest is always valid.
        elif self.data_type == DataType.CSR:
            self.not_valid_after = datetime.datetime.max
            self.not_valid_before = datetime.datetime.min
        else:
            raise ValueError("Unsupported data_type: {}".format(self.data_type))


class X509Parser:
    """A parser class to parse crt/crl/csr file or data in PEM/DER format."""
    PEM_REGEX = br'-{5}BEGIN (?P<type>.*?)-{5}(?P<data>.*?)-{5}END (?P=type)-{5}'
    PEM_TAG_REGEX = br'-{5}BEGIN (?P<type>.*?)-{5}\n'
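    # For example, PEM_REGEX matches a whole block such as
    #   -----BEGIN CERTIFICATE----- ... -----END CERTIFICATE-----
    # capturing the tag ("CERTIFICATE") and the enclosed payload, while
    # PEM_TAG_REGEX only extracts the tag from the BEGIN line.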
    PEM_TAGS = {
        DataType.CRT: 'CERTIFICATE',
        DataType.CRL: 'X509 CRL',
        DataType.CSR: 'CERTIFICATE REQUEST'
    }

    def __init__(self,
                 backends:
                 typing.Dict[DataType,
                             typing.Dict[DataFormat,
                                         typing.Callable[[bytes], object]]]) \
        -> None:
        self.backends = backends
        self.__generate_parsers()

    def __generate_parser(self, data_type: DataType):
        """Parser generator for a specific DataType"""
        tag = self.PEM_TAGS[data_type]
        pem_loader = self.backends[data_type][DataFormat.PEM]
        der_loader = self.backends[data_type][DataFormat.DER]
        def wrapper(data: bytes):
            pem_type = X509Parser.pem_data_type(data)
            # It is in PEM format with target tag
            if pem_type == tag:
                return pem_loader(data)
            # It is in PEM format without target tag
            if pem_type:
                return None
            # It might be in DER format
            try:
                result = der_loader(data)
            except ValueError:
                result = None
            return result
        wrapper.__name__ = "{}.parser[{}]".format(type(self).__name__, tag)
        return wrapper

    def __generate_parsers(self):
        """Generate parsers for all supported DataTypes"""
        self.parsers = {}
        for data_type, _ in self.PEM_TAGS.items():
            self.parsers[data_type] = self.__generate_parser(data_type)

    def __getitem__(self, item):
        return self.parsers[item]

    @staticmethod
    def pem_data_type(data: bytes) -> typing.Optional[str]:
        """Get the tag from the data in PEM format

        :param data: data to be checked in binary mode.
        :return: the PEM tag, or None when no tag is detected.
        """
        m = re.search(X509Parser.PEM_TAG_REGEX, data)
        if m is not None:
            return m.group('type').decode('UTF-8')
        else:
            return None

    @staticmethod
    def check_hex_string(hex_str: str) -> bool:
        """Check if the hex string is possibly DER data."""
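        # For example, '3003020100' looks like DER data (SEQUENCE, definite
        # short-form length 3, followed by three content bytes), whereas
        # 'deadbeef' does not and is rejected.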
        hex_len = len(hex_str)
        # At least 6 hex chars for 3 bytes: Type + Length + Content
        if hex_len < 6:
            return False
        # Check if Type (1 byte) is SEQUENCE.
        if hex_str[0:2] != '30':
            return False
        # Check LENGTH (1 byte) value
        content_len = int(hex_str[2:4], base=16)
        consumed = 4
        if content_len in (128, 255):
            # Indefinite or Reserved
            return False
        elif content_len > 127:
            # Definite, Long
            length_len = (content_len - 128) * 2
            content_len = int(hex_str[consumed:consumed+length_len], base=16)
            consumed += length_len
        # Check LENGTH
        if hex_len != content_len * 2 + consumed:
            return False
        return True


class Auditor:
    """
    A base class that uses X509Parser to parse files into a list of AuditData.

    A subclass must implement the following methods:
    - collect_default_files: Return a list of file names that are used for
      parsing (auditing) by default. The list is stored in
      Auditor.default_files.
    - parse_file: Parse a single file into a list of AuditData.

    A subclass may override the following methods:
    - parse_bytes: By default, parse `bytes` containing exactly one piece of
      valid X.509 data (in DER or PEM format) into an X.509 object.
    - walk_all: By default, iterate over all the files in the provided
      file name list, call `parse_file` for each file and store the results
      by extending Auditor.audit_data.
    """
    def __init__(self, logger):
        self.logger = logger
        self.default_files = self.collect_default_files()
        # A list to store the parsed audit_data.
        self.audit_data = [] # type: typing.List[AuditData]
        self.parser = X509Parser({
            DataType.CRT: {
                DataFormat.PEM: x509.load_pem_x509_certificate,
                DataFormat.DER: x509.load_der_x509_certificate
            },
            DataType.CRL: {
                DataFormat.PEM: x509.load_pem_x509_crl,
                DataFormat.DER: x509.load_der_x509_crl
            },
            DataType.CSR: {
                DataFormat.PEM: x509.load_pem_x509_csr,
                DataFormat.DER: x509.load_der_x509_csr
            },
        })

    def collect_default_files(self) -> typing.List[str]:
        """Collect the default files for parsing."""
        raise NotImplementedError

    def parse_file(self, filename: str) -> typing.List[AuditData]:
        """
        Parse a list of AuditData from a file.

        :param filename: name of the file to parse.
        :return: list of AuditData parsed from the file.
        """
        raise NotImplementedError

    def parse_bytes(self, data: bytes):
        """Parse AuditData from bytes."""
        for data_type in list(DataType):
            try:
                result = self.parser[data_type](data)
            except ValueError as val_error:
                result = None
                self.logger.warning(val_error)
            if result is not None:
                audit_data = AuditData(data_type, result)
                return audit_data
        return None

    def walk_all(self, file_list: typing.Optional[typing.List[str]] = None):
        """
        Iterate over all the files in the list and get audit data.
        """
        if file_list is None:
            file_list = self.default_files
        for filename in file_list:
            data_list = self.parse_file(filename)
            self.audit_data.extend(data_list)

    @staticmethod
    def find_test_dir():
        """Get the relative path of the Mbed TLS test directory."""
        return os.path.relpath(build_tree.guess_mbedtls_root() + '/tests')


class TestDataAuditor(Auditor):
    """Class for auditing files in `tests/data_files/`"""

    def collect_default_files(self):
        """Collect all files in `tests/data_files/`"""
        test_dir = self.find_test_dir()
        test_data_glob = os.path.join(test_dir, 'data_files/**')
        data_files = [f for f in glob.glob(test_data_glob, recursive=True)
                      if os.path.isfile(f)]
        return data_files

    def parse_file(self, filename: str) -> typing.List[AuditData]:
        """
        Parse a list of AuditData from a data file.

        :param filename: name of the file to parse.
        :return: list of AuditData parsed from the file.
        """
        with open(filename, 'rb') as f:
            data = f.read()

        results = []
        for idx, m in enumerate(re.finditer(X509Parser.PEM_REGEX, data, flags=re.S), 1):
            result = self.parse_bytes(data[m.start():m.end()])
            if result is not None:
                result.locations.append("{}#{}".format(filename, idx))
                results.append(result)

        return results


def parse_suite_data(data_f):
    """
    Parse a .data file for test arguments that possibly contain valid
    X.509 data. If you need a more precise parser, please use
    generate_test_code.parse_test_data instead.

    :param data_f: file object of the data file.
    :return: Generator that yields test function argument lists.
    """
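    # For instance, a hypothetical suite line such as
    #   x509_parse_crt:"308204...":0
    # yields the argument list ['"308204..."', '0'].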
    for line in data_f:
        line = line.strip()
        # Skip comments
        if line.startswith('#'):
            continue

        # Check parameters line
        match = re.search(r'\A\w+(.*:)?\"', line)
        if match:
            # Read test vectors
            parts = re.split(r'(?<!\\):', line)
            parts = [x for x in parts if x]
            args = parts[1:]
            yield args


class SuiteDataAuditor(Auditor):
    """Class for auditing files in `tests/suites/*.data`"""

    def collect_default_files(self):
        """Collect all files in `tests/suites/*.data`"""
        test_dir = self.find_test_dir()
        suites_data_folder = os.path.join(test_dir, 'suites')
        data_files = glob.glob(os.path.join(suites_data_folder, '*.data'))
        return data_files

    def parse_file(self, filename: str):
        """
        Parse a list of AuditData from a test suite data file.

        :param filename: name of the file to parse.
        :return: list of AuditData parsed from the file.
        """
        audit_data_list = []
        data_f = FileWrapper(filename)
        for test_args in parse_suite_data(data_f):
            for idx, test_arg in enumerate(test_args):
                match = re.match(r'"(?P<data>[0-9a-fA-F]+)"', test_arg)
                if not match:
                    continue
                if not X509Parser.check_hex_string(match.group('data')):
                    continue
                audit_data = self.parse_bytes(bytes.fromhex(match.group('data')))
                if audit_data is None:
                    continue
                audit_data.locations.append("{}:{}:#{}".format(filename,
                                                               data_f.line_no,
                                                               idx + 1))
                audit_data_list.append(audit_data)

        return audit_data_list


def merge_auditdata(original: typing.List[AuditData]) \
        -> typing.List[AuditData]:
    """
    Multiple AuditData might be extracted from different locations for
    an identical X.509 object. Merge them into one entry in the list.
    """
    results = []
    for x in original:
        if x not in results:
            results.append(x)
        else:
            idx = results.index(x)
            results[idx].locations.extend(x.locations)
    return results


def list_all(audit_data: AuditData):
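    """Print the validity period, type and all locations of an AuditData entry."""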
    print("{:20}\t{:20}\t{:3}\t{}".format(
        audit_data.not_valid_before.isoformat(timespec='seconds'),
        audit_data.not_valid_after.isoformat(timespec='seconds'),
        audit_data.data_type.name,
        audit_data.locations[0]))
    for loc in audit_data.locations[1:]:
        print("{:20}\t{:20}\t{:3}\t{}".format('', '', '', loc))


def configure_logger(logger: logging.Logger) -> None:
    """
    Configure the logging.Logger instance so that:
        - Format is set to "[%(levelname)s]: %(message)s".
        - Messages with loglevel >= WARNING are printed to stderr.
        - Messages with loglevel < WARNING are printed to stdout.
    """
    class MaxLevelFilter(logging.Filter):
        # pylint: disable=too-few-public-methods
        def __init__(self, max_level, name=''):
            super().__init__(name)
            self.max_level = max_level

        def filter(self, record: logging.LogRecord) -> bool:
            return record.levelno <= self.max_level

    log_formatter = logging.Formatter("[%(levelname)s]: %(message)s")

    # set loglevel >= WARNING to be printed to stderr
    stderr_hdlr = logging.StreamHandler(sys.stderr)
    stderr_hdlr.setLevel(logging.WARNING)
    stderr_hdlr.setFormatter(log_formatter)

    # set loglevel <= INFO to be printed to stdout
    stdout_hdlr = logging.StreamHandler(sys.stdout)
    stdout_hdlr.addFilter(MaxLevelFilter(logging.INFO))
    stdout_hdlr.setFormatter(log_formatter)

    logger.addHandler(stderr_hdlr)
    logger.addHandler(stdout_hdlr)


def main():
    """
    Parse the command-line arguments and audit the collected X.509 objects.
    """
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument('-a', '--all',
                        action='store_true',
                        help='list the information of all the files')
    parser.add_argument('-v', '--verbose',
                        action='store_true', dest='verbose',
                        help='show logs')
    parser.add_argument('--from', dest='start_date',
                        help=('Start of desired validity period (UTC, YYYY-MM-DD). '
                              'Default: today'),
                        metavar='DATE')
    parser.add_argument('--to', dest='end_date',
                        help=('End of desired validity period (UTC, YYYY-MM-DD). '
                              'Default: --from'),
                        metavar='DATE')
    parser.add_argument('--data-files', action='append', nargs='*',
                        help='data files to audit',
                        metavar='FILE')
    parser.add_argument('--suite-data-files', action='append', nargs='*',
                        help='suite data files to audit',
                        metavar='FILE')

    args = parser.parse_args()

    # start main routine
    # setup logger
    logger = logging.getLogger()
    configure_logger(logger)
    logger.setLevel(logging.DEBUG if args.verbose else logging.ERROR)

    td_auditor = TestDataAuditor(logger)
    sd_auditor = SuiteDataAuditor(logger)

    data_files = []
    suite_data_files = []
    if args.data_files is None and args.suite_data_files is None:
        data_files = td_auditor.default_files
        suite_data_files = sd_auditor.default_files
    else:
        if args.data_files is not None:
            data_files = [x for l in args.data_files for x in l]
        if args.suite_data_files is not None:
            suite_data_files = [x for l in args.suite_data_files for x in l]

    # validity period start date
    if args.start_date:
        start_date = datetime.datetime.fromisoformat(args.start_date)
    else:
        start_date = datetime.datetime.today()
    # validity period end date
    if args.end_date:
        end_date = datetime.datetime.fromisoformat(args.end_date)
    else:
        end_date = start_date

    # go through all the files
    td_auditor.walk_all(data_files)
    sd_auditor.walk_all(suite_data_files)
    audit_results = td_auditor.audit_data + sd_auditor.audit_data

    audit_results = merge_auditdata(audit_results)

    logger.info("Total: {} objects found!".format(len(audit_results)))

    # Filter out the objects whose validity period covers the requested
    # period; only objects that are not valid throughout it are reported.
    filter_func = lambda d: (start_date < d.not_valid_before) or \
                            (d.not_valid_after < end_date)

    sortby_end = lambda d: d.not_valid_after

    if args.all:
        filter_func = None

    # filter and output the results
    for d in sorted(filter(filter_func, audit_results), key=sortby_end):
        list_all(d)

    logger.debug("Done!")

check_cryptography_version()
if __name__ == "__main__":
    main()