Harrison Mutai | b188524 | 2025-04-30 14:06:06 +0000 | [diff] [blame] | 1 | #!/usr/bin/env python3 |
| 2 | |
| 3 | # |
| 4 | # Copyright (c) 2024, Arm Limited. All rights reserved. |
| 5 | # |
| 6 | # SPDX-License-Identifier: BSD-3-Clause |
| 7 | # |
| 8 | |
| 9 | """Contains unit tests for the types TransferEntry and TransferList.""" |
| 10 | |
| 11 | import math |
| 12 | from random import randint |
| 13 | |
| 14 | import pytest |
| 15 | |
| 16 | from tlc.te import TransferEntry |
| 17 | from tlc.tl import TransferList |
| 18 | |
# Sample payloads shared by the parametrized tests below.
large_data = 0xDEADBEEF.to_bytes(4, "big")
small_data = 0x1234.to_bytes(3, "big")
# (tag_id, data) pairs covering both ends of the 24-bit tag id range
# (0 and 0xFFFFFF) with empty and non-empty payloads.
test_entries = [
    (0, b""),
    (1, small_data),
    (1, large_data),
    (0xFFFFFF, small_data),
    (0xFFFFFF, large_data),
]
| 28 | |
| 29 | |
@pytest.mark.parametrize(
    "size,csum",
    [
        (-1, None),
        (0x18, 0x9E),
        (0x1000, 0xA6),
        (0x2000, 0x96),
        (0x4000, 0x76),
    ],
)
def test_make_transfer_list(size, csum):
    """Check TL construction for a range of sizes.

    Sizes below the minimum must be rejected; valid sizes must yield an
    empty transfer list with the expected signature and checksum.

    :param size: total size handed to the TransferList constructor.
    :param csum: expected header checksum, or None when construction
        is expected to fail.
    """
    if size < 8:
        # Sizes smaller than the TL header are invalid and asserted on
        # by the constructor.  Return early so the checks below never
        # touch an unbound ``tl``.
        with pytest.raises(AssertionError):
            TransferList(size)
        return

    tl = TransferList(size)

    assert tl.signature == 0x4A0FB10B
    assert not tl.entries
    assert tl.sum_of_bytes() == 0
    assert tl.checksum == csum
| 51 | |
| 52 | |
def test_add_transfer_entry(random_entries):
    """Entries added to a TL must appear in its entry list, and the TL
    size must stay 8-byte aligned after every addition."""
    tl = TransferList(0x1000)

    # A single entry first.
    entry = tl.add_transfer_entry(1, bytes(100))
    assert entry in tl.entries
    assert tl.size % 8 == 0

    # Then a batch of entries with random tag ids.
    for tag, payload in random_entries(50, 1):
        entry = tl.add_transfer_entry(tag, payload)
        assert entry in tl.entries
        assert tl.size % 8 == 0
| 66 | |
| 67 | |
@pytest.mark.parametrize("align", [4, 6, 12, 13])
def test_add_transfer_entry_with_align(align, random_entries, random_entry):
    """An entry added with an explicit ``data_align`` must have its data
    start on a ``1 << align`` boundary, and the TL must record that
    alignment and keep honouring it for later entries."""
    tl = TransferList(0xF00000)
    boundary = 1 << align

    tag, payload = random_entry(4)
    tl.add_transfer_entry(tag, payload)

    # An entry with a stricter alignment requirement than the default.
    _, payload = random_entry(4)
    entry = tl.add_transfer_entry(1, payload, data_align=align)
    assert (entry.offset + entry.hdr_size) % boundary == 0
    assert tl.alignment == align

    # Subsequent entries must preserve the alignment.
    for tag, payload in random_entries(5, 0x200):
        entry = tl.add_transfer_entry(tag, payload, data_align=align)
        assert (entry.offset + entry.hdr_size) % boundary == 0
        assert tl.alignment == align
| 86 | |
| 87 | |
@pytest.mark.parametrize(
    ("tag_id", "data"),
    [
        (-1, None),  # tag out of range
        (1, None),  # no data provided
        (1, bytes(8000)),  # very large data > total size
        (0x100000, b"0dd0edfe"),  # tag out of range
    ],
)
def test_add_out_of_range_transfer_entry(tag_id, data):
    """Adding an entry with an invalid tag id or payload must raise."""
    with pytest.raises(Exception):
        TransferList().add_transfer_entry(tag_id, data)
| 102 | |
| 103 | |
@pytest.mark.parametrize(("tag_id", "data"), test_entries)
def test_calculate_te_sum_of_bytes(tag_id, data):
    """A TE's byte checksum must equal the modular sum of its header
    fields and payload."""
    te = TransferEntry(tag_id, len(data), data)

    # Accumulate header size, tag id bytes, data size bytes and payload.
    expected = te.hdr_size
    expected += sum(tag_id.to_bytes(4, "big"))
    expected += sum(len(data).to_bytes(4, "big"))
    expected += sum(data)

    assert te.sum_of_bytes == expected % 256
| 114 | |
| 115 | |
def test_calc_tl_checksum(tmpdir, random_entries):
    """A populated TL must checksum to zero, both in memory and once
    serialized to disk."""
    tl = TransferList(0x1000)

    for tag, payload in random_entries(10):
        tl.add_transfer_entry(tag, payload)

    assert sum(tl.to_bytes()) % 256 == 0

    # Serializing must preserve the zero-sum property.
    tl_file = tmpdir.join("tl.bin")
    tl.write_to_file(tl_file)
    assert sum(tl_file.read_binary()) % 256 == 0
| 129 | |
| 130 | |
def test_empty_transfer_list_blob(tmpdir):
    """Check that we can correctly create a transfer list header."""
    test_file = tmpdir.join("test_tl_blob.bin")
    tl = TransferList()
    tl.write_to_file(test_file)

    # With no entries, the blob must begin with exactly the serialized header.
    assert test_file.read_binary()[: tl.hdr_size] == tl.header_to_bytes()
| 139 | |
| 140 | |
@pytest.mark.parametrize(("tag_id", "data"), test_entries)
def test_single_te_transfer_list(tag_id, data, tmpdir):
    """Check that we can create a complete TL with a single TE."""
    test_file = tmpdir.join("test_tl_blob.bin")
    tl = TransferList(0x1000)

    tl.add_transfer_entry(tag_id, data)
    tl.write_to_file(test_file)

    te = tl.entries[0]

    with open(test_file, "rb") as f:
        # TL header first, then the TE fields in on-disk layout order:
        # 3-byte tag id, 1-byte header size, 4-byte data size, payload.
        assert f.read(tl.hdr_size) == tl.header_to_bytes()
        assert te.id == int.from_bytes(f.read(3), "little")
        assert te.hdr_size == int.from_bytes(f.read(1), "little")
        assert te.data_size == int.from_bytes(f.read(4), "little")
        assert te.data == f.read(te.data_size)
| 158 | |
| 159 | |
def test_write_multiple_tes_to_file(tmpdir, random_entries, random_entry):
    """Check that we can create a TL with multiple TE's."""
    test_file = tmpdir.join("test_tl_blob.bin")
    tl = TransferList(0x4000)
    _test_entries = list(random_entries())

    for tag_id, data in _test_entries:
        tl.add_transfer_entry(tag_id, data)

    # Add a few entries with special alignment requirements
    blob_id, blob = random_entry(0x200)
    tl.add_transfer_entry(blob_id, blob, data_align=12)

    tl.write_to_file(test_file)

    with open(test_file, "rb") as f:
        # The blob starts with the serialized TL header.
        assert f.read(tl.hdr_size) == tl.header_to_bytes()
        # Ensure that TE's have the correct alignment
        for tag_id, data in _test_entries:
            # Each TE header starts on the next 8-byte boundary after the
            # previous entry's payload.
            f.seek(int(math.ceil(f.tell() / 8) * 8))

            # TE layout: 3-byte tag id, 1-byte header size, 4-byte data
            # size, then the payload itself.
            assert int.from_bytes(f.read(3), "little") == tag_id
            assert int.from_bytes(f.read(1), "little") == TransferEntry.hdr_size
            # Make sure the data in the TE matches the data in the original case
            data_size = int.from_bytes(f.read(4), "little")
            assert f.read(data_size) == data

        # The blob entry was added with data_align=12, so its payload sits
        # on the next 4KiB (1 << 12) boundary; its 8-byte TE header
        # immediately precedes that boundary.
        f.seek(int(math.ceil(f.tell() / (1 << 12)) * (1 << 12)) - 8)
        assert int.from_bytes(f.read(3), "little") == blob_id
        assert int.from_bytes(f.read(1), "little") == TransferEntry.hdr_size
        # Make sure the data in the TE matches the data in the original case
        data_size = int.from_bytes(f.read(4), "little")
        assert f.read(data_size) == blob

        # padding is added to align TE's, make sure padding is added to the size of
        # the TL by checking we don't overflow.
        assert f.tell() <= tl.size
| 197 | |
| 198 | |
def test_read_empty_transfer_list_from_file(tmpdir):
    """Round-trip an empty TL through a file and compare headers."""
    test_file = tmpdir.join("test_tl_blob.bin")
    original_tl = TransferList(0x1000)
    original_tl.write_to_file(test_file)

    # Parse the blob back in and verify nothing was lost or corrupted.
    parsed = TransferList.fromfile(test_file)
    assert parsed.header_to_bytes() == original_tl.header_to_bytes()
    assert parsed.sum_of_bytes() == 0
| 208 | |
| 209 | |
def test_read_single_transfer_list_from_file(tmpdir):
    """Round-trip a TL holding a single entry through a file."""
    test_file = tmpdir.join("test_tl_blob.bin")
    tag, payload = test_entries[0]

    original_tl = TransferList(0x1000)
    original_tl.add_transfer_entry(tag, payload)
    original_tl.write_to_file(test_file)

    # Parse the blob back in and verify the entry survived intact.
    parsed = TransferList.fromfile(test_file)
    assert parsed.entries

    te = parsed.entries[0]
    assert te.id == tag
    assert te.data == payload
    assert parsed.sum_of_bytes() == 0
| 225 | |
| 226 | |
def test_read_multiple_transfer_list_from_file(tmpdir):
    """Round-trip a TL holding every sample entry through a file."""
    test_file = tmpdir.join("test_tl_blob.bin")
    original_tl = TransferList(0x1000)

    for tag, payload in test_entries:
        original_tl.add_transfer_entry(tag, payload)

    original_tl.write_to_file(test_file)

    parsed = TransferList.fromfile(test_file)

    # A TE parsed from a file may carry an associated offset; compare
    # entries via their serialized headers, which do not include it.
    for parsed_te, original_te in zip(parsed.entries, original_tl.entries):
        assert parsed_te.header_to_bytes() == original_te.header_to_bytes()

    assert parsed.sum_of_bytes() == 0
| 245 | |
| 246 | |
def test_remove_tag(random_entry):
    """Adds a transfer entry and remove it, size == transfer list header."""
    tl = TransferList(0x100)
    tag, payload = random_entry(tl.total_size // 2)

    entry = tl.add_transfer_entry(tag, payload)
    assert entry in tl.entries

    # After removal the entry must be gone and the TL shrunk back to
    # just its header.
    tl.remove_tag(tag)
    assert not tl.get_entry(tag)
    assert entry not in tl.entries
    assert tl.size == tl.hdr_size
| 258 | |
| 259 | |
def test_get_fdt_offset(tmpdir):
    """The data offset of the first TE must point just past the TL
    header plus the TE header."""
    blob_file = tmpdir.join("blob.bin")

    tl = TransferList(0x1000)
    tl.add_transfer_entry(1, 0xEDFE0DD0.to_bytes(4, "big"))
    tl.write_to_file(blob_file)

    blob_tl = TransferList.fromfile(blob_file)

    expected = blob_tl.hdr_size + TransferEntry.hdr_size
    assert blob_tl.get_entry_data_offset(1) == expected
| 270 | |
| 271 | |
def test_get_missing_fdt_offset(tmpdir):
    """Looking up the data offset of an absent tag must raise ValueError."""
    blob_file = tmpdir.join("blob.bin")
    TransferList(0x1000).write_to_file(blob_file)

    blob_tl = TransferList.fromfile(blob_file)

    with pytest.raises(ValueError):
        blob_tl.get_entry_data_offset(1)